Skip to content

Commit d622af7

Browse files
author
Andrew Or
committed
Merge branch 'master' of github.com:apache/spark into demarcate-tests
2 parents 017c8ba + 4782e13 commit d622af7

File tree

1 file changed

+25
-6
lines changed

1 file changed

+25
-6
lines changed

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala

Lines changed: 25 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -19,6 +19,8 @@ package org.apache.spark.sql.hive.thriftserver
1919

2020
import java.io.File
2121
import java.net.URL
22+
import java.nio.charset.StandardCharsets
23+
import java.nio.file.{Files, Paths}
2224
import java.sql.{Date, DriverManager, Statement}
2325

2426
import scala.collection.mutable.ArrayBuffer
@@ -54,7 +56,7 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
5456
override def mode: ServerMode.Value = ServerMode.binary
5557

5658
private def withCLIServiceClient(f: ThriftCLIServiceClient => Unit): Unit = {
57-
// Transport creation logics below mimics HiveConnection.createBinaryTransport
59+
// Transport creation logic below mimics HiveConnection.createBinaryTransport
5860
val rawTransport = new TSocket("localhost", serverPort)
5961
val user = System.getProperty("user.name")
6062
val transport = PlainSaslHelper.getPlainTransport(user, "anonymous", rawTransport)
@@ -391,10 +393,10 @@ abstract class HiveThriftJdbcTest extends HiveThriftServer2Test {
391393
val statements = connections.map(_.createStatement())
392394

393395
try {
394-
statements.zip(fs).map { case (s, f) => f(s) }
396+
statements.zip(fs).foreach { case (s, f) => f(s) }
395397
} finally {
396-
statements.map(_.close())
397-
connections.map(_.close())
398+
statements.foreach(_.close())
399+
connections.foreach(_.close())
398400
}
399401
}
400402

@@ -433,15 +435,32 @@ abstract class HiveThriftServer2Test extends SparkFunSuite with BeforeAndAfterAl
433435
ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT
434436
}
435437

438+
val driverClassPath = {
439+
// Writes a temporary log4j.properties and prepend it to driver classpath, so that it
440+
// overrides all other potential log4j configurations contained in other dependency jar files.
441+
val tempLog4jConf = Utils.createTempDir().getCanonicalPath
442+
443+
Files.write(
444+
Paths.get(s"$tempLog4jConf/log4j.properties"),
445+
"""log4j.rootCategory=INFO, console
446+
|log4j.appender.console=org.apache.log4j.ConsoleAppender
447+
|log4j.appender.console.target=System.err
448+
|log4j.appender.console.layout=org.apache.log4j.PatternLayout
449+
|log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
450+
""".stripMargin.getBytes(StandardCharsets.UTF_8))
451+
452+
tempLog4jConf + File.pathSeparator + sys.props("java.class.path")
453+
}
454+
436455
s"""$startScript
437456
| --master local
438-
| --hiveconf hive.root.logger=INFO,console
439457
| --hiveconf ${ConfVars.METASTORECONNECTURLKEY}=$metastoreJdbcUri
440458
| --hiveconf ${ConfVars.METASTOREWAREHOUSE}=$warehousePath
441459
| --hiveconf ${ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST}=localhost
442460
| --hiveconf ${ConfVars.HIVE_SERVER2_TRANSPORT_MODE}=$mode
443461
| --hiveconf $portConf=$port
444-
| --driver-class-path ${sys.props("java.class.path")}
462+
| --driver-class-path $driverClassPath
463+
| --driver-java-options -Dlog4j.debug
445464
| --conf spark.ui.enabled=false
446465
""".stripMargin.split("\\s+").toSeq
447466
}

0 commit comments

Comments (0)