@@ -19,6 +19,8 @@ package org.apache.spark.sql.hive.thriftserver
 
 import java.io.File
 import java.net.URL
+import java.nio.charset.StandardCharsets
+import java.nio.file.{Files, Paths}
 import java.sql.{Date, DriverManager, Statement}
 
 import scala.collection.mutable.ArrayBuffer
@@ -54,7 +56,7 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
   override def mode: ServerMode.Value = ServerMode.binary
 
   private def withCLIServiceClient(f: ThriftCLIServiceClient => Unit): Unit = {
-    // Transport creation logics below mimics HiveConnection.createBinaryTransport
+    // Transport creation logic below mimics HiveConnection.createBinaryTransport
     val rawTransport = new TSocket("localhost", serverPort)
     val user = System.getProperty("user.name")
     val transport = PlainSaslHelper.getPlainTransport(user, "anonymous", rawTransport)
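For context on the helper touched above, a hedged usage sketch is shown below. It is assumed to live inside HiveThriftBinaryServerSuite; openSession and closeSession come from Hive's CLIServiceClient API, while the test name and assertion are illustrative assumptions, not part of this patch.

// Hedged usage sketch (not part of this patch), assumed to sit inside HiveThriftBinaryServerSuite.
// openSession/closeSession are from Hive's CLIServiceClient; the body is illustrative only.
test("CLI service client smoke check (illustrative)") {
  withCLIServiceClient { client =>
    val user = System.getProperty("user.name")
    val sessionHandle = client.openSession(user, "")
    try {
      assert(sessionHandle != null)
    } finally {
      client.closeSession(sessionHandle)
    }
  }
}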
@@ -391,10 +393,10 @@ abstract class HiveThriftJdbcTest extends HiveThriftServer2Test {
     val statements = connections.map(_.createStatement())
 
     try {
-      statements.zip(fs).map { case (s, f) => f(s) }
+      statements.zip(fs).foreach { case (s, f) => f(s) }
     } finally {
-      statements.map(_.close())
-      connections.map(_.close())
+      statements.foreach(_.close())
+      connections.foreach(_.close())
     }
   }
 
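The map-to-foreach change above is about intent and allocation: map runs the closures but also builds and returns a new collection that the caller immediately discards, while foreach runs them purely for their side effects. A minimal, self-contained illustration, not taken from this patch:

// Standalone sketch (not part of the patch): map vs. foreach when only side effects matter.
object MapVsForeachSketch {
  def main(args: Array[String]): Unit = {
    val statements = Seq("stmt-1", "stmt-2", "stmt-3")

    // map executes the closure but also allocates a Seq[Unit] that nobody reads.
    val discarded: Seq[Unit] = statements.map(s => println(s"closing $s"))

    // foreach states the intent directly and returns Unit.
    statements.foreach(s => println(s"closing $s"))
  }
}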
@@ -433,15 +435,32 @@ abstract class HiveThriftServer2Test extends SparkFunSuite with BeforeAndAfterAl
       ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT
     }
 
+    val driverClassPath = {
+      // Writes a temporary log4j.properties and prepends it to the driver classpath, so that it
+      // overrides all other potential log4j configurations contained in other dependency jar files.
+      val tempLog4jConf = Utils.createTempDir().getCanonicalPath
+
+      Files.write(
+        Paths.get(s"$tempLog4jConf/log4j.properties"),
+        """log4j.rootCategory=INFO, console
+          |log4j.appender.console=org.apache.log4j.ConsoleAppender
+          |log4j.appender.console.target=System.err
+          |log4j.appender.console.layout=org.apache.log4j.PatternLayout
+          |log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+        """.stripMargin.getBytes(StandardCharsets.UTF_8))
+
+      tempLog4jConf + File.pathSeparator + sys.props("java.class.path")
+    }
+
     s"""$startScript
        |  --master local
-       |  --hiveconf hive.root.logger=INFO,console
        |  --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$metastoreJdbcUri
        |  --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$warehousePath
        |  --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST}=localhost
        |  --hiveconf ${ConfVars.HIVE_SERVER2_TRANSPORT_MODE}=$mode
        |  --hiveconf $portConf=$port
-       |  --driver-class-path ${sys.props("java.class.path")}
+       |  --driver-class-path $driverClassPath
+       |  --driver-java-options -Dlog4j.debug
        |  --conf spark.ui.enabled=false
      """.stripMargin.split("\\s+").toSeq
   }
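The driverClassPath change works because log4j loads the first log4j.properties the driver's classloader resolves, so prepending the temporary directory shadows any copies bundled inside dependency jars, and -Dlog4j.debug makes log4j report which file it actually loaded. Below is a standalone sketch of that classpath-ordering behaviour; the temp directories and the demo object are hypothetical and not part of the patch.

// Standalone sketch (not part of the patch): a classloader resolves the *first*
// log4j.properties on the classpath, which is why the temp config dir must be
// prepended to --driver-class-path rather than appended.
import java.net.{URL, URLClassLoader}
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Path}

object ClasspathOrderingDemo {
  private def dirWithConfig(rootCategory: String): Path = {
    val dir = Files.createTempDirectory("log4j-demo")  // hypothetical temp dir
    Files.write(
      dir.resolve("log4j.properties"),
      s"log4j.rootCategory=$rootCategory, console\n".getBytes(StandardCharsets.UTF_8))
    dir
  }

  def main(args: Array[String]): Unit = {
    val preferred = dirWithConfig("INFO")   // plays the role of tempLog4jConf
    val shadowed = dirWithConfig("DEBUG")   // plays the role of a config inside a dependency jar

    // URL order mirrors classpath order; a null parent keeps the demo self-contained.
    val loader = new URLClassLoader(
      Array[URL](preferred.toUri.toURL, shadowed.toUri.toURL), null)

    // Prints the log4j.properties written into `preferred`, because it comes first.
    println(loader.getResource("log4j.properties"))
  }
}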