Commit 10c3565

address review comments

1 parent 6bc9204

File tree: 6 files changed, +21 / -18 lines

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala

Lines changed: 1 addition & 1 deletion

@@ -216,7 +216,7 @@ private[hive] object HiveQl {
    */
   def getAst(sql: String): ASTNode = {
     /*
-     * Context has to be passed in in hive0.13.1.
+     * Context has to be passed in hive0.13.1.
      * Otherwise, there will be a NullPointerException
      * when retrieving properties from HiveConf.
      */
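
For reference, a minimal sketch of the 0.13.1-style parse path this comment describes, assuming Hive's standard parser API (the hiveConf parameter and the method shape are illustrative, not the exact Spark code):

import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.ql.Context
import org.apache.hadoop.hive.ql.parse.{ASTNode, ParseDriver, ParseUtils}

// Sketch: on hive-0.13.1 the parser must be handed a Context built from a
// real HiveConf; without it, property lookups during the parse throw NPEs.
def getAst(sql: String, hiveConf: HiveConf): ASTNode = {
  val context = new Context(hiveConf)
  val pd = new ParseDriver
  ParseUtils.findRootNonNullToken(pd.parse(sql, context))
}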

sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala

Lines changed: 3 additions & 1 deletion

@@ -61,6 +61,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
   // By clearing the port we force Spark to pick a new one. This allows us to rerun tests
   // without restarting the JVM.
   System.clearProperty("spark.hostPort")
+  CommandProcessorFactory.clean(hiveconf)

   lazy val warehousePath = getTempFilePath("sparkHiveWarehouse").getCanonicalPath
   lazy val metastorePath = getTempFilePath("sparkHiveMetastore").getCanonicalPath
@@ -79,7 +80,6 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
   // For some hive test cases which contain ${system:test.tmp.dir}
   System.setProperty("test.tmp.dir", testTempDir.getCanonicalPath)

-  CommandProcessorFactory.clean(hiveconf)
   configure() // Must be called before initializing the catalog below.

   /** The location of the compiled hive distribution */
@@ -371,6 +371,8 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
    * tests.
    */
   protected val originalUdfs: JavaSet[String] = FunctionRegistry.getFunctionNames
+
+  // The default database may not exist in 0.13.1; create it if needed.
   HiveShim.createDefaultDBIfNeeded(this)

   /**
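
The shim call is all this diff shows of createDefaultDBIfNeeded; a minimal sketch of the behavior the new comment describes, with the body as an assumption:

import org.apache.spark.sql.hive.HiveContext

// Hypothetical implementation, not the real HiveShim method: the diff only
// confirms "create the default database if it is missing" on 0.13.1.
def createDefaultDBIfNeeded(context: HiveContext): Unit = {
  // Idempotent DDL, so it is a no-op on versions where "default" already exists.
  context.sql("CREATE DATABASE IF NOT EXISTS default")
}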

sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala

Lines changed: 1 addition & 1 deletion

@@ -81,7 +81,7 @@ class StatisticsSuite extends QueryTest with BeforeAndAfterAll {
     sql("INSERT INTO TABLE analyzeTable SELECT * FROM src").collect()
     sql("INSERT INTO TABLE analyzeTable SELECT * FROM src").collect()

-    // TODO: How it works? needs to add it back for other hive version.
+    // TODO: How does this work? It needs to be added back for other Hive versions.
     if (HiveShim.version == "0.12.0") {
       assert(queryTotalSize("analyzeTable") === defaultSizeInBytes)
     }

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala

Lines changed: 2 additions & 1 deletion

@@ -557,7 +557,8 @@ class HiveQuerySuite extends HiveComparisonTest {
          |WITH serdeproperties('s1'='9')
        """.stripMargin)
     }
-    // now only verify 0.12.0, and ignore other versions due to binary compatability
+    // Only verify 0.12.0 for now and ignore other versions due to binary compatibility;
+    // the current TestSerDe.jar is from 0.12.0.
     if (HiveShim.version == "0.12.0") {
       sql(s"ADD JAR $testJar")
       sql(

sql/hive/v0.12.0/src/main/scala/org/apache/spark/sql/hive/Shim.scala

Lines changed: 1 addition & 1 deletion

@@ -69,7 +69,7 @@ private[hive] object HiveShim {
     ColumnProjectionUtils.appendReadColumnNames(conf, names)
   }

-  def getExternalTmpPath(context: Context, uri: URI): String = {
+  def getExternalTmpPath(context: Context, uri: URI) = {
     context.getExternalTmpFileURI(uri)
   }
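
Dropping the explicit ": String" leaves the return type inferred. For comparison, the 0.13.1 shim (whose signature appears in the next file of this commit) works with Path instead of a String URI, so the two versions now read symmetrically; the 0.13.1 body below is an assumption based on Hive's Context API:

// hive-0.12.0: the external temp location is a String URI
def getExternalTmpPath(context: Context, uri: URI) =
  context.getExternalTmpFileURI(uri)

// hive-0.13.1: the external temp location is a Path (body assumed)
def getExternalTmpPath(context: Context, path: Path) =
  context.getExternalTmpPath(path)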

sql/hive/v0.13.1/src/main/scala/org/apache/spark/sql/hive/Shim.scala

Lines changed: 13 additions & 13 deletions

@@ -40,10 +40,10 @@ import scala.language.implicitConversions
 private[hive] object HiveShim {
   val version = "0.13.1"
   /*
-   * TODO: hive-0.13 support DECIMAL(precision, scale), DECIMAL in hive-0.12 is actually DECIMAL(10,0)
+   * TODO: hive-0.13 supports DECIMAL(precision, scale); DECIMAL in hive-0.12 is actually DECIMAL(38,unbounded).
    * Full support of the new decimal feature needs to be fixed in a separate PR.
    */
-  val metastoreDecimal = "decimal(10,0)"
+  val metastoreDecimal = "decimal\\((\\d+),(\\d+)\\)".r

   def getTableDesc(
       serdeClass: Class[_ <: Deserializer],
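
metastoreDecimal is now a Regex with two capture groups rather than the fixed string "decimal(10,0)", so callers can pattern-match precision and scale out of a metastore type name. A small usage sketch (the input value is illustrative, not from this commit):

// Extract precision and scale with the regex shim above.
"decimal(12,2)" match {
  case HiveShim.metastoreDecimal(precision, scale) =>
    println(s"precision=$precision, scale=$scale")   // precision=12, scale=2
  case other =>
    println(s"$other is not a parameterized decimal type")
}
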
@@ -82,8 +82,7 @@ private[hive] object HiveShim {
     for (col <- cols) {
       if (first) {
         first = false
-      }
-      else {
+      } else {
         result.append(',')
       }
       result.append(col)
@@ -98,7 +97,9 @@ private[hive] object HiveShim {
     if (ids != null && ids.size > 0) {
       ColumnProjectionUtils.appendReadColumns(conf, ids)
     }
-    appendReadColumnNames(conf, names)
+    if (names != null && names.size > 0) {
+      appendReadColumnNames(conf, names)
+    }
   }

   def getExternalTmpPath(context: Context, path: Path) = {
@@ -110,17 +111,16 @@ private[hive] object HiveShim {
   def getAllPartitionsOf(client: Hive, tbl: Table) = client.getAllPartitionsOf(tbl)

   /*
-   * Bug introdiced in hive-0.13. FileSinkDesc is serilizable, but its member path is not.
+   * Bug introduced in hive-0.13: FileSinkDesc is serializable, but its member path is not.
    * Fix it through a wrapper.
    */
   implicit def wrapperToFileSinkDesc(w: ShimFileSinkDesc): FileSinkDesc = {
-    var f = new FileSinkDesc(new Path(w.dir), w.tableInfo, w.compressed)
-    f.setCompressed(w.compressed)
-    f.setCompressCodec(w.compressCodec)
-    f.setCompressType(w.compressType)
-    f.setTableInfo(w.tableInfo)
-    f.setDestTableId(w.destTableId)
-    f
+    val f = new FileSinkDesc(new Path(w.dir), w.tableInfo, w.compressed)
+    f.setCompressCodec(w.compressCodec)
+    f.setCompressType(w.compressType)
+    f.setTableInfo(w.tableInfo)
+    f.setDestTableId(w.destTableId)
+    f
   }
 }
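
Because the conversion is implicit, call sites can keep a serializable ShimFileSinkDesc and pass it wherever Hive's FileSinkDesc is expected. A hedged usage sketch: writeToHive is a hypothetical stand-in, the wrapper's constructor is not shown in this commit, and its visibility from the call site is assumed:

import org.apache.hadoop.hive.ql.plan.FileSinkDesc
import org.apache.spark.sql.hive.ShimFileSinkDesc
import org.apache.spark.sql.hive.HiveShim._   // brings wrapperToFileSinkDesc into scope

// Hypothetical consumer: any API that takes Hive's FileSinkDesc, whose
// path member is the part that fails to serialize on hive-0.13.
def writeToHive(desc: FileSinkDesc): Unit = { /* ... */ }

// The wrapper is what actually gets serialized with the task; the implicit
// conversion rebuilds a full FileSinkDesc at the point of use.
val shimDesc: ShimFileSinkDesc = ???   // construction elided; not shown in the diff
writeToHive(shimDesc)                  // wrapperToFileSinkDesc applies here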
