Commit 578d167

Author: Jacky Li
Commit message: make caseSensitive configurable
Parent: 6dfe38a

2 files changed: 9 additions, 1 deletion

sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
8 additions, 0 deletions

@@ -46,6 +46,8 @@ private[spark] object SQLConf {
   // This is only used for the thriftserver
   val THRIFTSERVER_POOL = "spark.sql.thriftserver.scheduler.pool"
 
+  val CASE_SENSITIVE = "spark.sql.caseSensitive"
+
   object Deprecated {
     val MAPRED_REDUCE_TASKS = "mapred.reduce.tasks"
   }
@@ -148,6 +150,12 @@ private[sql] trait SQLConf {
   private[spark] def columnNameOfCorruptRecord: String =
     getConf(COLUMN_NAME_OF_CORRUPT_RECORD, "_corrupt_record")
 
+  /**
+   * When set to true, analyzer is case sensitive
+   */
+  private[spark] def caseSensitive: Boolean =
+    getConf(CASE_SENSITIVE, "true").toBoolean
+
   /** ********************** SQLConf functionality methods ************ */
 
   /** Set Spark SQL configuration properties. */

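The SQLConf.scala change adds a new key, "spark.sql.caseSensitive", plus a caseSensitive accessor that reads it with a default of "true". A minimal usage sketch, assuming a SQLContext built from this commit and the trait's existing setConf(key, value) setter; the object name and local master are illustrative only:

import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

object CaseSensitiveConfDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "caseSensitive-demo")
    val sqlContext = new SQLContext(sc)

    // Default is "true": identifiers must match column names exactly.
    // "false" asks the analyzer to resolve names case-insensitively.
    sqlContext.setConf("spark.sql.caseSensitive", "false")

    sc.stop()
  }
}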
sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
1 addition, 1 deletion

@@ -65,7 +65,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
 
   @transient
   protected[sql] lazy val analyzer: Analyzer =
-    new Analyzer(catalog, functionRegistry, caseSensitive = true)
+    new Analyzer(catalog, functionRegistry, caseSensitive)
 
   @transient
   protected[sql] lazy val optimizer: Optimizer = DefaultOptimizer

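The SQLContext.scala change threads the new setting into the lazy analyzer in place of the hard-coded caseSensitive = true. Because the Analyzer is constructed lazily and only once per SQLContext, the flag is captured at first use; a sketch of that ordering caveat, with the query left commented out since the people table is hypothetical:

import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

object AnalyzerInitOrderDemo {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "analyzer-init-demo")
    val sqlContext = new SQLContext(sc)

    // Effective: the lazy analyzer has not been built yet.
    sqlContext.setConf("spark.sql.caseSensitive", "false")

    // The first analyzed statement constructs the Analyzer with
    // caseSensitive = false; it is then reused for later queries.
    // (hypothetical table, registration omitted)
    // sqlContext.sql("SELECT NAME FROM people")

    // Flipping the flag afterwards does not rebuild the existing
    // Analyzer for this SQLContext.
    sqlContext.setConf("spark.sql.caseSensitive", "true")

    sc.stop()
  }
}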