
Commit 1e5e454 (parent: 2ca74db)

Fixed a bug with symbol conversion.

5 files changed (+22 / -17 lines)


sql/core/src/main/scala/org/apache/spark/sql/Literal.scala

Lines changed: 5 additions & 0 deletions

@@ -69,6 +69,11 @@ object Literal {
    * data type is not supported by SparkSQL.
    */
   protected[sql] def anyToLiteral(literal: Any): Column = {
+    // If the literal is a symbol, convert it into a Column.
+    if (literal.isInstanceOf[Symbol]) {
+      return dsl.symbolToColumn(literal.asInstanceOf[Symbol])
+    }
+
     val literalExpr = literal match {
       case v: Int => LiteralExpr(v, IntegerType)
       case v: Long => LiteralExpr(v, LongType)
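Note on the fix: per the doc comment above, anyToLiteral rejects values whose type has no literal mapping, and Symbol had no case in the match. The new guard converts a Symbol into a column reference (via dsl.symbolToColumn) before the literal match runs. A minimal sketch of the behavior this enables, assuming the DSL's comparison operators accept Any and route non-Column values through anyToLiteral; df and its columns are illustrative, not taken from this commit:

    // Hypothetical usage; df, 'a and 'b are assumed test-table columns.
    df.where('a === 42)   // Int: still handled by the literal match
    df.where('a === 'b)   // Symbol: now converted via dsl.symbolToColumn,
                          // yielding a column-to-column comparison instead
                          // of an unsupported-literal error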

sql/core/src/test/scala/org/apache/spark/sql/DslQuerySuite.scala

Lines changed: 4 additions & 4 deletions

@@ -118,19 +118,19 @@ class DslQuerySuite extends QueryTest {

     checkAnswer(
       arrayData.orderBy('data.getItem(0).asc),
-      arrayData.toSchemaRDD.collect().sortBy(_.getAs[Seq[Int]](0)(0)).toSeq)
+      arrayData.toDF.collect().sortBy(_.getAs[Seq[Int]](0)(0)).toSeq)

     checkAnswer(
       arrayData.orderBy('data.getItem(0).desc),
-      arrayData.toSchemaRDD.collect().sortBy(_.getAs[Seq[Int]](0)(0)).reverse.toSeq)
+      arrayData.toDF.collect().sortBy(_.getAs[Seq[Int]](0)(0)).reverse.toSeq)

     checkAnswer(
       arrayData.orderBy('data.getItem(1).asc),
-      arrayData.toSchemaRDD.collect().sortBy(_.getAs[Seq[Int]](0)(1)).toSeq)
+      arrayData.toDF.collect().sortBy(_.getAs[Seq[Int]](0)(1)).toSeq)

     checkAnswer(
       arrayData.orderBy('data.getItem(1).desc),
-      arrayData.toSchemaRDD.collect().sortBy(_.getAs[Seq[Int]](0)(1)).reverse.toSeq)
+      arrayData.toDF.collect().sortBy(_.getAs[Seq[Int]](0)(1)).reverse.toSeq)
   }

   test("limit") {

sql/core/src/test/scala/org/apache/spark/sql/TestData.scala

Lines changed: 10 additions & 10 deletions

@@ -30,11 +30,11 @@ case class TestData(key: Int, value: String)

 object TestData {
   val testData = TestSQLContext.sparkContext.parallelize(
-    (1 to 100).map(i => TestData(i, i.toString))).toSchemaRDD
+    (1 to 100).map(i => TestData(i, i.toString))).toDF
   testData.registerTempTable("testData")

   val negativeData = TestSQLContext.sparkContext.parallelize(
-    (1 to 100).map(i => TestData(-i, (-i).toString))).toSchemaRDD
+    (1 to 100).map(i => TestData(-i, (-i).toString))).toDF
   negativeData.registerTempTable("negativeData")

   case class LargeAndSmallInts(a: Int, b: Int)

@@ -45,7 +45,7 @@ object TestData {
     LargeAndSmallInts(2147483645, 1) ::
     LargeAndSmallInts(2, 2) ::
     LargeAndSmallInts(2147483646, 1) ::
-    LargeAndSmallInts(3, 2) :: Nil).toSchemaRDD
+    LargeAndSmallInts(3, 2) :: Nil).toDF
   largeAndSmallInts.registerTempTable("largeAndSmallInts")

   case class TestData2(a: Int, b: Int)

@@ -56,7 +56,7 @@ object TestData {
     TestData2(2, 1) ::
     TestData2(2, 2) ::
     TestData2(3, 1) ::
-    TestData2(3, 2) :: Nil, 2).toSchemaRDD
+    TestData2(3, 2) :: Nil, 2).toDF
   testData2.registerTempTable("testData2")

   case class DecimalData(a: BigDecimal, b: BigDecimal)

@@ -68,7 +68,7 @@ object TestData {
     DecimalData(2, 1) ::
     DecimalData(2, 2) ::
     DecimalData(3, 1) ::
-    DecimalData(3, 2) :: Nil).toSchemaRDD
+    DecimalData(3, 2) :: Nil).toDF
   decimalData.registerTempTable("decimalData")

   case class BinaryData(a: Array[Byte], b: Int)

@@ -78,14 +78,14 @@ object TestData {
     BinaryData("22".getBytes(), 5) ::
     BinaryData("122".getBytes(), 3) ::
     BinaryData("121".getBytes(), 2) ::
-    BinaryData("123".getBytes(), 4) :: Nil).toSchemaRDD
+    BinaryData("123".getBytes(), 4) :: Nil).toDF
   binaryData.registerTempTable("binaryData")

   case class TestData3(a: Int, b: Option[Int])
   val testData3 =
     TestSQLContext.sparkContext.parallelize(
       TestData3(1, None) ::
-      TestData3(2, Some(2)) :: Nil).toSchemaRDD
+      TestData3(2, Some(2)) :: Nil).toDF
   testData3.registerTempTable("testData3")

   val emptyTableData = logical.LocalRelation($"a".int, $"b".int)

@@ -98,7 +98,7 @@ object TestData {
     UpperCaseData(3, "C") ::
     UpperCaseData(4, "D") ::
     UpperCaseData(5, "E") ::
-    UpperCaseData(6, "F") :: Nil).toSchemaRDD
+    UpperCaseData(6, "F") :: Nil).toDF
   upperCaseData.registerTempTable("upperCaseData")

   case class LowerCaseData(n: Int, l: String)

@@ -107,7 +107,7 @@ object TestData {
     LowerCaseData(1, "a") ::
     LowerCaseData(2, "b") ::
     LowerCaseData(3, "c") ::
-    LowerCaseData(4, "d") :: Nil).toSchemaRDD
+    LowerCaseData(4, "d") :: Nil).toDF
   lowerCaseData.registerTempTable("lowerCaseData")

   case class ArrayData(data: Seq[Int], nestedData: Seq[Seq[Int]])

@@ -201,6 +201,6 @@ object TestData {
   TestSQLContext.sparkContext.parallelize(
     ComplexData(Map(1 -> "1"), TestData(1, "1"), Seq(1), true)
     :: ComplexData(Map(2 -> "2"), TestData(2, "2"), Seq(2), false)
-    :: Nil).toSchemaRDD
+    :: Nil).toDF
   complexData.registerTempTable("complexData")
 }
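Taken together, the TestData hunks are one mechanical rename: every dataset built from a parallelized collection of case classes is now converted with toDF instead of toSchemaRDD, and nothing else changes. A condensed sketch of the pattern, reusing names from the diff (the implicit that supplies toDF is assumed to be in scope, as it is in this test file):

    case class TestData(key: Int, value: String)

    val testData = TestSQLContext.sparkContext
      .parallelize((1 to 100).map(i => TestData(i, i.toString)))
      .toDF                                  // previously .toSchemaRDD
    testData.registerTempTable("testData")   // registration is unchanged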

sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala

Lines changed: 2 additions & 2 deletions

@@ -821,7 +821,7 @@ class JsonSuite extends QueryTest {

     val schemaRDD1 = applySchema(rowRDD1, schema1)
     schemaRDD1.registerTempTable("applySchema1")
-    val schemaRDD2 = schemaRDD1.toSchemaRDD
+    val schemaRDD2 = schemaRDD1.toDF
     val result = schemaRDD2.toJSON.collect()
     assert(result(0) == "{\"f1\":1,\"f2\":\"A1\",\"f3\":true,\"f4\":[\"1\",\" A1\",\" true\",\" null\"]}")
     assert(result(3) == "{\"f1\":4,\"f2\":\"D4\",\"f3\":true,\"f4\":[\"4\",\" D4\",\" true\",\" 2147483644\"],\"f5\":2147483644}")

@@ -842,7 +842,7 @@ class JsonSuite extends QueryTest {

     val schemaRDD3 = applySchema(rowRDD2, schema2)
     schemaRDD3.registerTempTable("applySchema2")
-    val schemaRDD4 = schemaRDD3.toSchemaRDD
+    val schemaRDD4 = schemaRDD3.toDF
     val result2 = schemaRDD4.toJSON.collect()

     assert(result2(1) == "{\"f1\":{\"f11\":2,\"f12\":false},\"f2\":{\"B2\":null}}")
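The JsonSuite hunks exercise the rename on the JSON round trip: a schema is applied to a row RDD, the result is converted (now via toDF), and toJSON serializes each row back to a JSON string. Condensed from the test above (rowRDD1 and schema1 are built earlier in JsonSuite and assumed here):

    val schemaRDD1 = applySchema(rowRDD1, schema1)
    val result = schemaRDD1.toDF.toJSON.collect()  // one JSON string per row
    assert(result(0) == "{\"f1\":1,\"f2\":\"A1\",\"f3\":true,\"f4\":[\"1\",\" A1\",\" true\",\" null\"]}")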

sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala

Lines changed: 1 addition & 1 deletion

@@ -52,7 +52,7 @@ class InsertIntoHiveTableSuite extends QueryTest {
     // Make sure the table has been updated.
     checkAnswer(
       sql("SELECT * FROM createAndInsertTest"),
-      testData.toSchemaRDD.collect().toSeq ++ testData.toSchemaRDD.collect().toSeq
+      testData.toDF.collect().toSeq ++ testData.toDF.collect().toSeq
     )

     // Now overwrite.
