@@ -20,7 +20,7 @@ package org.apache.spark.sql.hive
 import java.io.IOException
 import java.util.{List => JList}

-import com.google.common.cache.{CacheLoader, CacheBuilder}
+import com.google.common.cache.{LoadingCache, CacheLoader, CacheBuilder}

 import org.apache.hadoop.util.ReflectionUtils
 import org.apache.hadoop.hive.metastore.TableType
@@ -54,46 +54,47 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with

   // TODO: Use this everywhere instead of tuples or databaseName, tableName,.
   /** A fully qualified identifier for a table (i.e., database.tableName) */
-  case class TableIdent(database: String, name: String) {
-    def toLowerCase = TableIdent(database.toLowerCase, name.toLowerCase)
+  case class QualifiedTableName(database: String, name: String) {
+    def toLowerCase = QualifiedTableName(database.toLowerCase, name.toLowerCase)
   }

   /** A cache of Spark SQL data source tables that have been accessed. */
-  protected[hive] val cachedDataSourceTables = CacheBuilder.newBuilder()
-    .maximumSize(1000)
-    .build(
-      new CacheLoader[TableIdent, LogicalPlan]() {
-        override def load(in: TableIdent): LogicalPlan = {
-          logDebug(s"Creating new cached data source for $in")
-          val table = client.getTable(in.database, in.name)
-          val schemaString = table.getProperty("spark.sql.sources.schema")
-          val userSpecifiedSchema =
-            if (schemaString == null) {
-              None
-            } else {
-              Some(DataType.fromJson(schemaString).asInstanceOf[StructType])
-            }
-          // It does not appear that the ql client for the metastore has a way to enumerate all the
-          // SerDe properties directly...
-          val options = table.getTTable.getSd.getSerdeInfo.getParameters.toMap
-
-          val resolvedRelation =
-            ResolvedDataSource(
-              hive,
-              userSpecifiedSchema,
-              table.getProperty("spark.sql.sources.provider"),
-              options)
-
-          LogicalRelation(resolvedRelation.relation)
-        }
-      })
+  protected[hive] val cachedDataSourceTables: LoadingCache[QualifiedTableName, LogicalPlan] = {
+    val cacheLoader = new CacheLoader[QualifiedTableName, LogicalPlan]() {
+      override def load(in: QualifiedTableName): LogicalPlan = {
+        logDebug(s"Creating new cached data source for $in")
+        val table = client.getTable(in.database, in.name)
+        val schemaString = table.getProperty("spark.sql.sources.schema")
+        val userSpecifiedSchema =
+          if (schemaString == null) {
+            None
+          } else {
+            Some(DataType.fromJson(schemaString).asInstanceOf[StructType])
+          }
+        // It does not appear that the ql client for the metastore has a way to enumerate all the
+        // SerDe properties directly...
+        val options = table.getTTable.getSd.getSerdeInfo.getParameters.toMap
+
+        val resolvedRelation =
+          ResolvedDataSource(
+            hive,
+            userSpecifiedSchema,
+            table.getProperty("spark.sql.sources.provider"),
+            options)
+
+        LogicalRelation(resolvedRelation.relation)
+      }
+    }
+
+    CacheBuilder.newBuilder().maximumSize(1000).build(cacheLoader)
+  }

   def refreshTable(databaseName: String, tableName: String): Unit = {
-    cachedDataSourceTables.refresh(TableIdent(databaseName, tableName).toLowerCase)
+    cachedDataSourceTables.refresh(QualifiedTableName(databaseName, tableName).toLowerCase)
   }

   def invalidateTable(databaseName: String, tableName: String): Unit = {
-    cachedDataSourceTables.invalidate(TableIdent(databaseName, tableName).toLowerCase)
+    cachedDataSourceTables.invalidate(QualifiedTableName(databaseName, tableName).toLowerCase)
   }

   val caseSensitive: Boolean = false
@@ -143,7 +144,7 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
     val table = client.getTable(databaseName, tblName)

     if (table.getProperty("spark.sql.sources.provider") != null) {
-      cachedDataSourceTables(TableIdent(databaseName, tblName).toLowerCase)
+      cachedDataSourceTables(QualifiedTableName(databaseName, tblName).toLowerCase)
     } else if (table.isView) {
       // if the unresolved relation is from hive view
       // parse the text into logic node.
0 commit comments