Commit 5472b08

fix compile issue
1 parent 56034ca commit 5472b08

6 files changed, +18 -20 lines changed

pom.xml

Lines changed: 7 additions & 7 deletions

@@ -192,7 +192,7 @@
       <id>central</id>
       <!-- This should be at top, it makes maven try the central repo first and then others and hence faster dep resolution -->
       <name>Maven Repository</name>
-      <url>https://repo1.maven.org/maven2</url>
+      <url>http://repo1.maven.org/maven2</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -203,7 +203,7 @@
     <repository>
       <id>apache-repo</id>
       <name>Apache Repository</name>
-      <url>https://repository.apache.org/content/repositories/releases</url>
+      <url>http://repository.apache.org/content/repositories/releases</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -214,7 +214,7 @@
     <repository>
       <id>jboss-repo</id>
       <name>JBoss Repository</name>
-      <url>https://repository.jboss.org/nexus/content/repositories/releases</url>
+      <url>http://repository.jboss.org/nexus/content/repositories/releases</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -225,7 +225,7 @@
     <repository>
       <id>mqtt-repo</id>
       <name>MQTT Repository</name>
-      <url>https://repo.eclipse.org/content/repositories/paho-releases</url>
+      <url>http://repo.eclipse.org/content/repositories/paho-releases</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -236,7 +236,7 @@
     <repository>
       <id>cloudera-repo</id>
       <name>Cloudera Repository</name>
-      <url>https://repository.cloudera.com/artifactory/cloudera-repos</url>
+      <url>http://repository.cloudera.com/artifactory/cloudera-repos</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -258,7 +258,7 @@
     <repository>
       <id>spring-releases</id>
      <name>Spring Release Repository</name>
-      <url>https://repo.spring.io/libs-release</url>
+      <url>http://repo.spring.io/libs-release</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -270,7 +270,7 @@
   <pluginRepositories>
     <pluginRepository>
       <id>central</id>
-      <url>https://repo1.maven.org/maven2</url>
+      <url>http://repo1.maven.org/maven2</url>
       <releases>
         <enabled>true</enabled>
       </releases>

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystConf.scala

Lines changed: 6 additions & 4 deletions

@@ -36,19 +36,21 @@ private[spark] trait CatalystConf {
  * relations are already filled in and the analyser needs only to resolve attribute references.
  */
 object EmptyConf extends CatalystConf {
-  def setConf(key: String, value: String) : Unit = {
+  override def caseSensitiveAnalysis: Boolean = true
+
+  override def setConf(key: String, value: String) : Unit = {
     throw new UnsupportedOperationException
   }

-  def getConf(key: String) : String = {
+  override def getConf(key: String) : String = {
     throw new UnsupportedOperationException
   }

-  def getConf(key: String, defaultValue: String) : String = {
+  override def getConf(key: String, defaultValue: String) : String = {
     throw new UnsupportedOperationException
   }

-  def getAllConfs: immutable.Map[String, String] = {
+  override def getAllConfs: immutable.Map[String, String] = {
     throw new UnsupportedOperationException
   }
 }
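
For reference, a sketch of how EmptyConf reads once this hunk is applied. The CatalystConf trait shown here is inferred only from the members EmptyConf overrides in this diff; the real private[spark] trait in this file may declare more.

import scala.collection.immutable

// Inferred sketch of the trait, reduced to the members visible in this diff.
// The real definition is private[spark] and may contain additional members.
trait CatalystConf {
  def caseSensitiveAnalysis: Boolean
  def setConf(key: String, value: String): Unit
  def getConf(key: String): String
  def getConf(key: String, defaultValue: String): String
  def getAllConfs: immutable.Map[String, String]
}

// EmptyConf after the patch: every operation stays unsupported, and the new
// caseSensitiveAnalysis flag is pinned to true so the placeholder still compiles.
object EmptyConf extends CatalystConf {
  override def caseSensitiveAnalysis: Boolean = true

  override def setConf(key: String, value: String): Unit =
    throw new UnsupportedOperationException

  override def getConf(key: String): String =
    throw new UnsupportedOperationException

  override def getConf(key: String, defaultValue: String): String =
    throw new UnsupportedOperationException

  override def getAllConfs: immutable.Map[String, String] =
    throw new UnsupportedOperationException
}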

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 1 addition & 1 deletion

@@ -31,7 +31,7 @@ import org.apache.spark.sql.types._
  * when all relations are already filled in and the analyzer needs only to resolve attribute
  * references.
  */
-object SimpleAnalyzer extends Analyzer(EmptyCatalog, EmptyFunctionRegistry, new SimpleConf)
+object SimpleAnalyzer extends Analyzer(EmptyCatalog, EmptyFunctionRegistry, new SimpleConf(true))

 /**
  * Provides a logical query plan analyzer, which translates [[UnresolvedAttribute]]s and

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/test/SimpleConf.scala

Lines changed: 3 additions & 1 deletion

@@ -23,9 +23,11 @@ import scala.collection.immutable
 import scala.collection.mutable

 /** A CatalystConf that can be used for local testing. */
-class SimpleConf(caseSensitiveAnalysis: Boolean) extends CatalystConf {
+class SimpleConf(caseSensitive: Boolean) extends CatalystConf {
   val map = mutable.Map[String, String]()

+  def caseSensitiveAnalysis: Boolean = caseSensitive
+
   def setConf(key: String, value: String) : Unit = {
     map.put(key, value)
   }
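
Putting the hunk in context, a sketch of SimpleConf after the change. Only the constructor parameter, the map field, caseSensitiveAnalysis, and setConf appear in this diff; the getter bodies below are assumed map-backed implementations for illustration, not code from this commit.

import scala.collection.immutable
import scala.collection.mutable

/** A CatalystConf that can be used for local testing (sketch). */
class SimpleConf(caseSensitive: Boolean) extends CatalystConf {
  val map = mutable.Map[String, String]()

  def caseSensitiveAnalysis: Boolean = caseSensitive

  def setConf(key: String, value: String): Unit = {
    map.put(key, value)
  }

  // The bodies below are assumed map-backed implementations; they are not
  // part of this diff and may differ from the actual file.
  def getConf(key: String): String = map(key)

  def getConf(key: String, defaultValue: String): String =
    map.getOrElse(key, defaultValue)

  def getAllConfs: immutable.Map[String, String] = map.toMap
}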

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala

Lines changed: 1 addition & 1 deletion

@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.test.SimpleConf
 import org.scalatest.{BeforeAndAfter, FunSuite}

 class DecimalPrecisionSuite extends FunSuite with BeforeAndAfter {
-  val conf = new SimpleConf
+  val conf = new SimpleConf(true)
   val catalog = new SimpleCatalog(conf)
   val analyzer = new Analyzer(catalog, EmptyFunctionRegistry, conf)
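
Presumably this is part of the compile issue the commit message refers to: with SimpleConf now taking a Boolean, the old zero-argument construction no longer type-checks. A minimal before/after:

// val conf = new SimpleConf      // would no longer compile: SimpleConf now requires a Boolean
val conf = new SimpleConf(true)   // case-sensitive analysis enabled for this suite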

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 0 additions & 6 deletions

@@ -21,13 +21,7 @@ import org.scalatest.BeforeAndAfterAll

 import org.apache.spark.sql.execution.GeneratedAggregate
 import org.apache.spark.sql.functions._
-<<<<<<< HEAD
 import org.apache.spark.sql.catalyst.CatalystConf
-import org.apache.spark.sql.catalyst.errors.TreeNodeException
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.types._
-=======
->>>>>>> 254e0509762937acc9c72b432d5d953bf72c3c52
 import org.apache.spark.sql.TestData._
 import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.test.TestSQLContext.{udf => _, _}
