
Commit d4b724f

address michael's comment

1 parent: af512c7

10 files changed: 13 additions, 84 deletions

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystConf.scala (3 additions, 26 deletions)

```diff
@@ -17,18 +17,8 @@
 
 package org.apache.spark.sql.catalyst
 
-import scala.collection.immutable
-
-private[spark] object CatalystConf{
-  val CASE_SENSITIVE = "spark.sql.caseSensitive"
-}
-
 private[spark] trait CatalystConf {
   def caseSensitiveAnalysis: Boolean
-  def setConf(key: String, value: String) : Unit
-  def getConf(key: String) : String
-  def getConf(key: String, defaultValue: String) : String
-  def getAllConfs: immutable.Map[String, String]
 }
 
 /**
@@ -39,20 +29,7 @@ object EmptyConf extends CatalystConf {
   override def caseSensitiveAnalysis: Boolean = {
     throw new UnsupportedOperationException
   }
-
-  override def setConf(key: String, value: String) : Unit = {
-    throw new UnsupportedOperationException
-  }
-
-  override def getConf(key: String) : String = {
-    throw new UnsupportedOperationException
-  }
-
-  override def getConf(key: String, defaultValue: String) : String = {
-    throw new UnsupportedOperationException
-  }
-
-  override def getAllConfs: immutable.Map[String, String] = {
-    throw new UnsupportedOperationException
-  }
 }
+
+/** A CatalystConf that can be used for local testing. */
+case class SimpleCatalystConf(caseSensitiveAnalysis: Boolean) extends CatalystConf
```
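
With the getter/setter methods gone, `CatalystConf` is a pure read-side interface and `SimpleCatalystConf` gives tests a one-line implementation. A minimal sketch of the resulting API; the `resolve` helper is illustrative, not part of this patch:

```scala
import org.apache.spark.sql.catalyst.{CatalystConf, SimpleCatalystConf}

object CatalystConfExample {
  // The case-class parameter implements the trait's abstract
  // `def caseSensitiveAnalysis: Boolean` as a val.
  val sensitive: CatalystConf = SimpleCatalystConf(caseSensitiveAnalysis = true)
  val insensitive: CatalystConf = SimpleCatalystConf(caseSensitiveAnalysis = false)

  // Illustrative helper: how an analysis rule might compare attribute
  // names depending on the flag.
  def resolve(conf: CatalystConf)(a: String, b: String): Boolean =
    if (conf.caseSensitiveAnalysis) a == b else a.equalsIgnoreCase(b)
}
```

Here `resolve(insensitive)("VALUE", "value")` returns `true`, which is the behavior the SPARK-4699 test further down exercises through SQL.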

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala (1 addition, 2 deletions)

```diff
@@ -20,11 +20,10 @@ package org.apache.spark.sql.catalyst.analysis
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.CatalystConf
+import org.apache.spark.sql.catalyst.{SimpleCatalystConf, CatalystConf}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules._
-import org.apache.spark.sql.catalyst.test.SimpleCatalystConf
 import org.apache.spark.sql.types._
 import org.apache.spark.util.collection.OpenHashSet
```
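
Since `SimpleCatalystConf` now lives in the main source tree rather than a `test` subpackage, both names import from `org.apache.spark.sql.catalyst` in one clause. A sketch of wiring an analyzer with it, assuming the constructor shape of this era (`catalog`, `registry`, `conf`) and the `SimpleCatalog`/`EmptyFunctionRegistry` helpers from the same package:

```scala
import org.apache.spark.sql.catalyst.SimpleCatalystConf
import org.apache.spark.sql.catalyst.analysis.{Analyzer, EmptyFunctionRegistry, SimpleCatalog}

// Sketch: two analyzers that differ only in case sensitivity.
val caseSensitiveConf = SimpleCatalystConf(caseSensitiveAnalysis = true)
val caseSensitiveAnalyzer =
  new Analyzer(new SimpleCatalog(caseSensitiveConf), EmptyFunctionRegistry, caseSensitiveConf)

val caseInsensitiveConf = SimpleCatalystConf(caseSensitiveAnalysis = false)
val caseInsensitiveAnalyzer =
  new Analyzer(new SimpleCatalog(caseInsensitiveConf), EmptyFunctionRegistry, caseInsensitiveConf)
```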

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/test/SimpleCatalystConf.scala (0 additions, 47 deletions)

This file was deleted.

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala (1 addition, 2 deletions)

```diff
@@ -23,8 +23,7 @@ import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.catalyst.CatalystConf
-import org.apache.spark.sql.catalyst.test.SimpleCatalystConf
+import org.apache.spark.sql.catalyst.SimpleCatalystConf
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._
```

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala (1 addition, 1 deletion)

```diff
@@ -21,8 +21,8 @@ import org.scalatest.{BeforeAndAfter, FunSuite}
 
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical.{Union, Project, LocalRelation}
-import org.apache.spark.sql.catalyst.test.SimpleCatalystConf
 import org.apache.spark.sql.types._
+import org.apache.spark.sql.catalyst.SimpleCatalystConf
 
 class DecimalPrecisionSuite extends FunSuite with BeforeAndAfter {
   val conf = new SimpleCatalystConf(true)
```
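
A side note on the suite's `val conf = new SimpleCatalystConf(true)`: because `SimpleCatalystConf` is now a case class, the explicit `new` is legal but no longer required:

```scala
val conf1 = new SimpleCatalystConf(true) // fine: the class constructor still exists
val conf2 = SimpleCatalystConf(true)     // idiomatic: companion-object apply()
```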

sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala (2 additions, 1 deletion)

```diff
@@ -34,6 +34,7 @@ private[spark] object SQLConf {
   val CODEGEN_ENABLED = "spark.sql.codegen"
   val UNSAFE_ENABLED = "spark.sql.unsafe.enabled"
   val DIALECT = "spark.sql.dialect"
+  val CASE_SENSITIVE = "spark.sql.caseSensitive"
 
   val PARQUET_BINARY_AS_STRING = "spark.sql.parquet.binaryAsString"
   val PARQUET_INT96_AS_TIMESTAMP = "spark.sql.parquet.int96AsTimestamp"
@@ -164,7 +165,7 @@ private[sql] class SQLConf extends Serializable with CatalystConf {
   /**
    * caseSensitive analysis true by default
    */
-  def caseSensitiveAnalysis: Boolean = getConf(CatalystConf.CASE_SENSITIVE, "true").toBoolean
+  def caseSensitiveAnalysis: Boolean = getConf(SQLConf.CASE_SENSITIVE, "true").toBoolean
 
   /**
    * When set to true, Spark SQL will use managed memory for certain operations. This option only
```
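
The key now sits alongside the other SQL option names, and `caseSensitiveAnalysis` resolves it through `SQLConf`'s string-based `getConf` with a `"true"` fallback. A sketch of that lookup pattern, using a stand-in class rather than the real `SQLConf` internals (the assumption being that settings are stored as strings and typed on read):

```scala
import java.util.concurrent.ConcurrentHashMap

// Stand-in for SQLConf's string-to-string settings map; illustrative only.
class MiniSQLConf {
  private val settings = new ConcurrentHashMap[String, String]()

  def setConf(key: String, value: String): Unit = settings.put(key, value)

  def getConf(key: String, defaultValue: String): String =
    Option(settings.get(key)).getOrElse(defaultValue)

  // Mirrors the patched accessor: string entry, Boolean view, default true.
  def caseSensitiveAnalysis: Boolean =
    getConf("spark.sql.caseSensitive", "true").toBoolean
}
```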

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala (2 additions, 2 deletions)

```diff
@@ -1279,12 +1279,12 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
   }
 
   test("SPARK-4699 case sensitivity SQL query") {
-    setConf(CatalystConf.CASE_SENSITIVE, "false")
+    setConf(SQLConf.CASE_SENSITIVE, "false")
     val data = TestData(1, "val_1") :: TestData(2, "val_2") :: Nil
     val rdd = sparkContext.parallelize((0 to 1).map(i => data(i)))
     rdd.toDF().registerTempTable("testTable1")
     checkAnswer(sql("SELECT VALUE FROM TESTTABLE1 where KEY = 1"), Row("val_1"))
-    setConf(CatalystConf.CASE_SENSITIVE, "true")
+    setConf(SQLConf.CASE_SENSITIVE, "true")
   }
 
   test("SPARK-6145: ORDER BY test for nested fields") {
```

sql/core/src/test/scala/org/apache/spark/sql/sources/DataSourceTest.scala (1 addition, 1 deletion)

```diff
@@ -26,5 +26,5 @@ abstract class DataSourceTest extends QueryTest with BeforeAndAfter {
   // We want to test some edge cases.
   implicit val caseInsensisitiveContext = new SQLContext(TestSQLContext.sparkContext)
 
-  caseInsensisitiveContext.setConf(CatalystConf.CASE_SENSITIVE, "false")
+  caseInsensisitiveContext.setConf(SQLConf.CASE_SENSITIVE, "false")
 }
```

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala (1 addition, 1 deletion)

```diff
@@ -352,7 +352,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
   protected[sql] override lazy val conf: SQLConf = new SQLConf {
     override def dialect: String = getConf(SQLConf.DIALECT, "hiveql")
     override def caseSensitiveAnalysis: Boolean =
-      getConf(CatalystConf.CASE_SENSITIVE, "false").toBoolean
+      getConf(SQLConf.CASE_SENSITIVE, "false").toBoolean
   }
 
   /**
```
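
Note the asymmetry in defaults: plain `SQLConf` falls back to `"true"` for this key, while `HiveContext` (and `TestHiveContext` below) override the fallback to `"false"` to match Hive's case-insensitive name resolution. An explicit setting still wins over either default; a sketch, assuming access from within the `org.apache.spark.sql` package since `conf` is `protected[sql]`:

```scala
// Same key, different per-context fallback; setConf overrides both.
hiveContext.conf.caseSensitiveAnalysis              // false (Hive-side default)
hiveContext.setConf(SQLConf.CASE_SENSITIVE, "true")
hiveContext.conf.caseSensitiveAnalysis              // true (explicit setting wins)
```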

sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala (1 addition, 1 deletion)

```diff
@@ -108,7 +108,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
     // The super.getConf(SQLConf.DIALECT) is "sql" by default, we need to set it as "hiveql"
     override def dialect: String = super.getConf(SQLConf.DIALECT, "hiveql")
     override def caseSensitiveAnalysis: Boolean =
-      getConf(CatalystConf.CASE_SENSITIVE, "false").toBoolean
+      getConf(SQLConf.CASE_SENSITIVE, "false").toBoolean
   }
 }
```
