@@ -65,6 +65,8 @@ private[hive] class HiveQLDialect extends Dialect {
 class HiveContext(sc: SparkContext) extends SQLContext(sc) {
   self =>
 
+  import HiveContext._
+
   /**
    * When true, enables an experimental feature where metastore tables that use the parquet SerDe
    * are automatically converted to use the Spark SQL parquet table scan, instead of the Hive
@@ -103,18 +105,18 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
    * Spark SQL for execution.
    */
   protected[hive] def hiveMetastoreVersion: String =
-    getConf("spark.sql.hive.metastore.version", "0.13.1")
+    getConf(HIVE_METASTORE_VERSION, "0.13.1")
 
   /**
    * The location of the jars that should be used to instantiate the HiveMetastoreClient. This
    * property can be one of three options:
-   *  - a comma-separated list of jar files that could be passed to a URLClassLoader
+   *  - a colon-separated list of jar files or directories for hive and hadoop.
    *  - builtin - attempt to discover the jars that were used to load Spark SQL and use those. This
    *    option is only valid when using the execution version of Hive.
    *  - maven - download the correct version of hive on demand from maven.
    */
   protected[hive] def hiveMetastoreJars: String =
-    getConf("spark.sql.hive.metastore.jars", "builtin")
+    getConf(HIVE_METASTORE_JARS, "builtin")
 
   @transient
   protected[sql] lazy val substitutor = new VariableSubstitution()
@@ -173,8 +175,8 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
         throw new IllegalArgumentException(
           "Builtin jars can only be used when hive execution version == hive metastore version. " +
             s"Execution: ${hiveExecutionVersion} != Metastore: ${hiveMetastoreVersion}. " +
-            "Specify a vaild path to the correct hive jars using spark.sql.hive.metastore.jars " +
-            s"or change spark.sql.hive.metastore.version to ${hiveExecutionVersion}.")
+            s"Specify a valid path to the correct hive jars using $HIVE_METASTORE_JARS " +
+            s"or change $HIVE_METASTORE_VERSION to $hiveExecutionVersion.")
       }
       val jars = getClass.getClassLoader match {
         case urlClassLoader: java.net.URLClassLoader => urlClassLoader.getURLs
@@ -198,7 +200,17 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
         s"Initializing HiveMetastoreConnection version $hiveMetastoreVersion using maven.")
       IsolatedClientLoader.forVersion(hiveMetastoreVersion, allConfig)
     } else {
-      val jars = hiveMetastoreJars.split(",").map(new java.net.URL(_))
+      // Convert to files and expand any directories.
+      val jars =
+        hiveMetastoreJars
+          .split(":")
+          .map(new java.io.File(_))
+          .flatMap {
+            case f if f.isDirectory => f.listFiles()
+            case f => f :: Nil
+          }
+          .map(_.toURI.toURL)
+
       logInfo(
         s"Initializing HiveMetastoreConnection version $hiveMetastoreVersion using $jars")
       new IsolatedClientLoader(
@@ -460,7 +472,10 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
 }
 
 
-private object HiveContext {
+private[hive] object HiveContext {
+  val HIVE_METASTORE_VERSION: String = "spark.sql.hive.metastore.version"
+  val HIVE_METASTORE_JARS: String = "spark.sql.hive.metastore.jars"
+
   protected val primitiveTypes =
     Seq(StringType, IntegerType, LongType, DoubleType, FloatType, BooleanType, ByteType,
       ShortType, DateType, TimestampType, BinaryType)
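
A minimal usage sketch (not part of this patch; the metastore version, jar paths, and app name below are placeholders) showing how the two configuration keys introduced above might be set when connecting to a metastore whose version differs from the builtin execution version:

    // Sketch only: assumes a Hive 0.12.0 metastore and hypothetical local jar directories.
    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.hive.HiveContext

    val conf = new SparkConf()
      .setAppName("hive-metastore-isolation-example")
      .setMaster("local[*]")
      // Version of the Hive metastore to connect to (defaults to 0.13.1 above).
      .set("spark.sql.hive.metastore.version", "0.12.0")
      // Colon-separated jar files or directories for hive and hadoop; directories
      // are expanded to their contained jars by the new loading code above.
      .set("spark.sql.hive.metastore.jars", "/opt/hive/lib:/opt/hadoop/lib")

    val sc = new SparkContext(conf)
    val hiveContext = new HiveContext(sc)
    hiveContext.sql("SHOW TABLES").collect().foreach(println)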