@@ -27,10 +27,9 @@ import scala.collection.mutable.{ArrayBuffer, Map => SMap}
 import scala.math._
 
 import org.apache.hadoop.hive.conf.HiveConf
+import org.apache.hadoop.hive.metastore.api.FieldSchema
 import org.apache.hadoop.hive.ql.metadata.Hive
-import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory
 import org.apache.hadoop.hive.ql.session.SessionState
-import org.apache.hadoop.hive.metastore.api.FieldSchema
 import org.apache.hadoop.hive.shims.ShimLoader
 import org.apache.hadoop.security.UserGroupInformation
 import org.apache.hive.service.cli._
@@ -39,9 +38,9 @@ import org.apache.hive.service.cli.session.HiveSession
 
 import org.apache.spark.Logging
 import org.apache.spark.sql.catalyst.types._
-import org.apache.spark.sql.{Row => SparkRow, SchemaRDD}
-import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}
 import org.apache.spark.sql.hive.thriftserver.ReflectionUtils._
+import org.apache.spark.sql.hive.{HiveContext, HiveMetastoreTypes}
+import org.apache.spark.sql.{SchemaRDD, Row => SparkRow}
 
 /**
  * A compatibility layer for interacting with Hive version 0.12.0.
@@ -100,6 +99,7 @@ private[hive] class SparkExecuteStatementOperation(
       // Actually do need to catch Throwable as some failures don't inherit from Exception and
       // HiveServer will silently swallow them.
       case e: Throwable =>
+        setState(OperationState.ERROR)
         logError("Error executing query:", e)
         throw new HiveSQLException(e.toString)
     }
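
The functional change in this hunk is the added setState(OperationState.ERROR): rethrowing alone is not enough because, as the comment notes, HiveServer may silently swallow the exception, leaving clients that poll the operation's status looking at a stale running state. A minimal sketch of the pattern, using a hypothetical Operation stand-in rather than Hive's actual classes:

sealed trait OpState
case object Running extends OpState
case object Error extends OpState
case object Finished extends OpState

class Operation {
  @volatile private var state: OpState = Running
  def setState(s: OpState): Unit = { state = s }
  def getState: OpState = state

  // Run `body`, recording the failure state *before* rethrowing so that
  // even if the caller swallows the exception, pollers still see Error.
  def execute(body: => Unit): Unit =
    try {
      body
      setState(Finished)
    } catch {
      case e: Throwable =>
        setState(Error)
        throw new RuntimeException(e.toString, e)
    }
}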
@@ -194,14 +194,12 @@ private[hive] class SparkExecuteStatementOperation(
         try {
           sqlOperationConf.verifyAndSet(confEntry.getKey, confEntry.getValue)
         }
-        catch {
-          case e: IllegalArgumentException => {
-            throw new HiveSQLException("Error applying statement specific settings", e)
-          }
+        catch { case e: IllegalArgumentException =>
+          throw new HiveSQLException("Error applying statement specific settings", e)
         }
       }
     }
-    return sqlOperationConf
+    sqlOperationConf
   }
 
   def run(): Unit = {
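
Two cleanups here beyond the catch reshaping: the redundant braces around the case body are dropped, and the explicit return goes away. In Scala the last expression of a method is its result, and return is generally avoided (inside a closure it even compiles to exception-based non-local return). An illustrative sketch of the same shape, using a hypothetical helper rather than the patch's actual method:

import org.apache.hadoop.hive.conf.HiveConf

// Clone a HiveConf and apply per-statement overrides; the last expression
// `conf` is the result, no `return` keyword needed.
def cloneWithOverlay(base: HiveConf, overlay: Map[String, String]): HiveConf = {
  val conf = new HiveConf(base)
  overlay.foreach { case (k, v) =>
    try conf.verifyAndSet(k, v)
    catch { case e: IllegalArgumentException =>
      throw new IllegalStateException("Error applying statement specific settings", e)
    }
  }
  conf
}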
@@ -219,7 +217,7 @@ private[hive] class SparkExecuteStatementOperation(
     val currentUGI: UserGroupInformation = ShimLoader.getHadoopShims.getUGIForConf(opConfig)
 
     val backgroundOperation: Runnable = new Runnable {
-      def run {
+      def run() {
         val doAsAction: PrivilegedExceptionAction[AnyRef] =
           new PrivilegedExceptionAction[AnyRef] {
             def run: AnyRef = {
@@ -228,23 +226,19 @@ private[hive] class SparkExecuteStatementOperation(
               try {
                 runInternal(statement)
               }
-              catch {
-                case e: HiveSQLException => {
-                  setOperationException(e)
-                  logError("Error running hive query: ", e)
-                }
+              catch { case e: HiveSQLException =>
+                setOperationException(e)
+                logError("Error running hive query: ", e)
               }
-              return null
+              null
             }
           }
         try {
           ShimLoader.getHadoopShims.doAs(currentUGI, doAsAction)
         }
-        catch {
-          case e: Exception => {
-            setOperationException(new HiveSQLException(e))
-            logError("Error running hive query as user : " + currentUGI.getShortUserName, e)
-          }
+        catch { case e: Exception =>
+          setOperationException(new HiveSQLException(e))
+          logError("Error running hive query as user : " + currentUGI.getShortUserName, e)
         }
         setState(OperationState.FINISHED)
       }
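
For orientation in this last hunk: the statement body runs under the session user's identity by wrapping the work in a PrivilegedExceptionAction and dispatching it through ShimLoader's doAs. A minimal sketch of that pattern, calling Hadoop's UserGroupInformation.doAs directly instead of via ShimLoader (the helper name is an assumption; the Hadoop API is real):

import java.security.PrivilegedExceptionAction
import org.apache.hadoop.security.UserGroupInformation

// Execute `body` with the given user's Hadoop credentials; exceptions
// thrown inside the action surface to the caller, where the patched code
// converts them into an operation-level HiveSQLException.
def runAsUser[T](ugi: UserGroupInformation)(body: => T): T =
  ugi.doAs(new PrivilegedExceptionAction[T] {
    override def run(): T = body
  })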