
Commit 0a703e7

Test failed again since we cannot read the correct content.
1 parent 9a26611 · commit 0a703e7

1 file changed: 13 additions, 0 deletions


sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala

Lines changed: 13 additions & 0 deletions
@@ -34,6 +34,8 @@ import org.apache.spark.sql.hive.test.TestHive.implicits._
 import org.apache.spark.sql.parquet.ParquetRelation2
 import org.apache.spark.sql.sources.LogicalRelation
 
+import scala.collection.mutable.ArrayBuffer
+
 /**
  * Tests for persisting tables created though the data sources API into the metastore.
  */
@@ -596,12 +598,23 @@ class MetastoreDataSourcesSuite extends QueryTest with BeforeAndAfterEach {
   test("Pre insert nullability check") {
     val df1 =
       createDataFrame(Tuple1(Seq(Int.box(1), null.asInstanceOf[Integer])) :: Nil).toDF("a")
+    val expectedSchema1 =
+      StructType(
+        StructField("a", ArrayType(IntegerType, containsNull = true), nullable = true) :: Nil)
+    assert(df1.schema === expectedSchema1)
     df1.saveAsTable("arrayInParquet", "parquet", SaveMode.Overwrite)
 
     val df2 =
       createDataFrame(Tuple1(Seq(2, 3)) :: Nil).toDF("a")
+    val expectedSchema2 =
+      StructType(
+        StructField("a", ArrayType(IntegerType, containsNull = false), nullable = true) :: Nil)
+    assert(df2.schema === expectedSchema2)
     df2.saveAsTable("arrayInParquet", SaveMode.Append)
 
+    checkAnswer(
+      sql("SELECT a FROM arrayInParquet"),
+      Row(ArrayBuffer(1, null)) :: Row(ArrayBuffer(2, 3)) :: Nil)
   }
 
   test("SPARK-6024 wide schema support") {
