Skip to content

Commit 75dda33

Browse files
committed
Revert "[SPARK-8020] Spark SQL in spark-defaults.conf make metadataHive get constructed too early"
This reverts commit 91f6be8.
1 parent 91f6be8 commit 75dda33

File tree

2 files changed

+4
-66
lines changed

2 files changed

+4
-66
lines changed

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 3 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -182,28 +182,9 @@ class SQLContext(@transient val sparkContext: SparkContext)
182182
conf.dialect
183183
}
184184

185-
{
186-
// We extract spark sql settings from SparkContext's conf and put them to
187-
// Spark SQL's conf.
188-
// First, we populate the SQLConf (conf). So, we can make sure that other values using
189-
// those settings in their construction can get the correct settings.
190-
// For example, metadataHive in HiveContext may need both spark.sql.hive.metastore.version
191-
// and spark.sql.hive.metastore.jars to get correctly constructed.
192-
val properties = new Properties
193-
sparkContext.getConf.getAll.foreach {
194-
case (key, value) if key.startsWith("spark.sql") => properties.setProperty(key, value)
195-
case _ =>
196-
}
197-
// We directly put those settings to conf to avoid calling setConf, which may have
198-
// side-effects. For example, in HiveContext, setConf may cause executionHive and metadataHive
199-
// get constructed. If we call setConf directly, the constructed metadataHive may have
200-
// wrong settings, or the construction may fail.
201-
conf.setConf(properties)
202-
// After we have populated SQLConf, we call setConf to populate other confs in the subclass
203-
// (e.g. hiveconf in HiveContext).
204-
properties.foreach {
205-
case (key, value) => setConf(key, value)
206-
}
185+
sparkContext.getConf.getAll.foreach {
186+
case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
187+
case _ =>
207188
}
208189

209190
@transient

sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala

Lines changed: 1 addition & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,7 @@
1717

1818
package org.apache.spark.sql.hive.client
1919

20-
import org.apache.spark.sql.hive.HiveContext
21-
import org.apache.spark.{Logging, SparkConf, SparkContext, SparkFunSuite}
20+
import org.apache.spark.{Logging, SparkFunSuite}
2221
import org.apache.spark.sql.catalyst.util.quietly
2322
import org.apache.spark.util.Utils
2423

@@ -38,48 +37,6 @@ class VersionsSuite extends SparkFunSuite with Logging {
3837
"hive.metastore.warehouse.dir" -> warehousePath.toString)
3938
}
4039

41-
test("SPARK-8020: successfully create a HiveContext with metastore settings in Spark conf.") {
42-
val sparkConf =
43-
new SparkConf() {
44-
// We are not really cloning it. We need to keep the custom getAll.
45-
override def clone: SparkConf = this
46-
47-
override def getAll: Array[(String, String)] = {
48-
val allSettings = super.getAll
49-
val metastoreVersion = get("spark.sql.hive.metastore.version")
50-
val metastoreJars = get("spark.sql.hive.metastore.jars")
51-
52-
val others = allSettings.filterNot { case (key, _) =>
53-
key == "spark.sql.hive.metastore.version" || key == "spark.sql.hive.metastore.jars"
54-
}
55-
56-
// Put metastore.version to the first one. It is needed to trigger the exception
57-
// caused by SPARK-8020. Other problems triggered by SPARK-8020
58-
// (e.g. using Hive 0.13.1's metastore client to connect to a 0.12 metastore)
59-
// are not easy to test.
60-
Array(
61-
("spark.sql.hive.metastore.version" -> metastoreVersion),
62-
("spark.sql.hive.metastore.jars" -> metastoreJars)) ++ others
63-
}
64-
}
65-
sparkConf
66-
.set("spark.sql.hive.metastore.version", "12")
67-
.set("spark.sql.hive.metastore.jars", "maven")
68-
69-
val hiveContext = new HiveContext(
70-
new SparkContext(
71-
"local[2]",
72-
"TestSQLContextInVersionsSuite",
73-
sparkConf)) {
74-
75-
protected override def configure(): Map[String, String] = buildConf
76-
77-
}
78-
79-
// Make sure all metastore related lazy vals got created.
80-
hiveContext.tables()
81-
}
82-
8340
test("success sanity check") {
8441
val badClient = IsolatedClientLoader.forVersion("13", buildConf()).client
8542
val db = new HiveDatabase("default", "")

0 commit comments

Comments
 (0)