Commit 8e7fe9b

Add parquet, too.

1 parent ef2123e

1 file changed: 55 additions, 51 deletions

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala

@@ -2051,57 +2051,61 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     }
   }
 
-  test("SPARK-18355 Use Spark schema to read ORC table instead of ORC file schema") {
-    val client = spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
-
-    Seq("true", "false").foreach { value =>
-      withSQLConf(HiveUtils.CONVERT_METASTORE_ORC.key -> value) {
-        withTempDatabase { db =>
-          client.runSqlHive(
-            s"""
-               |CREATE TABLE $db.t(
-               |  click_id string,
-               |  search_id string,
-               |  uid bigint)
-               |PARTITIONED BY (
-               |  ts string,
-               |  hour string)
-               |STORED AS ORC
-             """.stripMargin)
-
-          client.runSqlHive(
-            s"""
-               |INSERT INTO TABLE $db.t
-               |PARTITION (ts = '98765', hour = '01')
-               |VALUES (12, 2, 12345)
-             """.stripMargin
-          )
-
-          checkAnswer(
-            sql(s"SELECT click_id, search_id, uid, ts, hour FROM $db.t"),
-            Row("12", "2", 12345, "98765", "01"))
-
-          client.runSqlHive(s"ALTER TABLE $db.t ADD COLUMNS (dummy string)")
-
-          checkAnswer(
-            sql(s"SELECT click_id, search_id FROM $db.t"),
-            Row("12", "2"))
-
-          checkAnswer(
-            sql(s"SELECT search_id, click_id FROM $db.t"),
-            Row("2", "12"))
-
-          checkAnswer(
-            sql(s"SELECT search_id FROM $db.t"),
-            Row("2"))
-
-          checkAnswer(
-            sql(s"SELECT dummy, click_id FROM $db.t"),
-            Row(null, "12"))
-
-          checkAnswer(
-            sql(s"SELECT click_id, search_id, uid, dummy, ts, hour FROM $db.t"),
-            Row("12", "2", 12345, null, "98765", "01"))
+  Seq("orc", "parquet").foreach { format =>
+    test(s"SPARK-18355 Read data from a hive table with a new column - $format") {
+      val client = spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
+
+      Seq("true", "false").foreach { value =>
+        withSQLConf(
+          HiveUtils.CONVERT_METASTORE_ORC.key -> value,
+          HiveUtils.CONVERT_METASTORE_PARQUET.key -> value) {
+          withTempDatabase { db =>
+            client.runSqlHive(
+              s"""
+                 |CREATE TABLE $db.t(
+                 |  click_id string,
+                 |  search_id string,
+                 |  uid bigint)
+                 |PARTITIONED BY (
+                 |  ts string,
+                 |  hour string)
+                 |STORED AS $format
+               """.stripMargin)
+
+            client.runSqlHive(
+              s"""
+                 |INSERT INTO TABLE $db.t
+                 |PARTITION (ts = '98765', hour = '01')
+                 |VALUES (12, 2, 12345)
+               """.stripMargin
+            )
+
+            checkAnswer(
+              sql(s"SELECT click_id, search_id, uid, ts, hour FROM $db.t"),
+              Row("12", "2", 12345, "98765", "01"))
+
+            client.runSqlHive(s"ALTER TABLE $db.t ADD COLUMNS (dummy string)")
+
+            checkAnswer(
+              sql(s"SELECT click_id, search_id FROM $db.t"),
+              Row("12", "2"))
+
+            checkAnswer(
+              sql(s"SELECT search_id, click_id FROM $db.t"),
+              Row("2", "12"))
+
+            checkAnswer(
+              sql(s"SELECT search_id FROM $db.t"),
+              Row("2"))
+
+            checkAnswer(
+              sql(s"SELECT dummy, click_id FROM $db.t"),
+              Row(null, "12"))
+
+            checkAnswer(
+              sql(s"SELECT click_id, search_id, uid, dummy, ts, hour FROM $db.t"),
+              Row("12", "2", 12345, null, "98765", "01"))
+          }
         }
       }
     }
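
Note on the change: the rewritten test is no longer a single ORC-only case. The whole body is wrapped in Seq("orc", "parquet").foreach { format => ... }, so ScalaTest registers one named test per storage format at suite-construction time, and both CONVERT_METASTORE_ORC and CONVERT_METASTORE_PARQUET are toggled together. Below is a minimal, self-contained sketch of that registration pattern, assuming a ScalaTest 3.0.x-style FunSuite and a hypothetical suite name; it is an illustration of the pattern, not the Spark code itself.

import org.scalatest.FunSuite

// Hypothetical, simplified suite showing the pattern used in the diff:
// one test is registered per element of the sequence, each with a
// format-specific name, when the suite class is constructed.
class FormatSuiteSketch extends FunSuite {
  Seq("orc", "parquet").foreach { format =>
    test(s"read data from a hive table with a new column - $format") {
      // The real suite creates a Hive table STORED AS $format, adds a column
      // via ALTER TABLE, and verifies queries with checkAnswer. Here we only
      // assert on the parameter so the sketch runs on its own.
      assert(format == "orc" || format == "parquet")
    }
  }
}

Each generated test appears under its own name (ending in "- orc" or "- parquet"), so a failure report pinpoints which format broke.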
