@@ -17,36 +17,27 @@
 
 package org.apache.spark.sql.hive
 
-import java.io.IOException
-import java.util.{List => JList}
-
 import com.google.common.base.Objects
 import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
 
 import org.apache.hadoop.fs.Path
-import org.apache.hadoop.hive.metastore.api.{FieldSchema, Partition => TPartition, Table => TTable}
-import org.apache.hadoop.hive.metastore.{TableType, Warehouse}
+import org.apache.hadoop.hive.metastore.api.FieldSchema
+import org.apache.hadoop.hive.metastore.Warehouse
 import org.apache.hadoop.hive.ql.metadata._
-import org.apache.hadoop.hive.ql.plan.CreateTableDesc
-import org.apache.hadoop.hive.serde.serdeConstants
-import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
-import org.apache.hadoop.hive.serde2.{Deserializer, SerDeException}
-import org.apache.hadoop.util.ReflectionUtils
+import org.apache.hadoop.hive.serde2.Deserializer
 
 import org.apache.spark.Logging
-import org.apache.spark.sql.hive.client.IsolatedClientLoader
 import org.apache.spark.sql.{SaveMode, AnalysisException, SQLContext}
-import org.apache.spark.sql.catalyst.analysis.{MultiInstanceRelation, NoSuchTableException, Catalog, OverrideCatalog}
+import org.apache.spark.sql.catalyst.analysis.{MultiInstanceRelation, Catalog, OverrideCatalog}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.planning.PhysicalOperation
 import org.apache.spark.sql.catalyst.plans.logical
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules._
 import org.apache.spark.sql.hive.client._
 import org.apache.spark.sql.parquet.{ParquetRelation2, Partition => ParquetPartition, PartitionSpec}
-import org.apache.spark.sql.sources.{CreateTableUsingAsSelect, DDLParser, LogicalRelation, ResolvedDataSource}
+import org.apache.spark.sql.sources.{CreateTableUsingAsSelect, LogicalRelation, ResolvedDataSource}
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.catalyst.CatalystConf
 import org.apache.spark.util.Utils
 
 /* Implicit conversions */
@@ -55,8 +46,6 @@ import scala.collection.JavaConversions._
 private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: HiveContext)
   extends Catalog with Logging {
 
-  import org.apache.spark.sql.hive.HiveMetastoreTypes._
-
   /** Usages should lock on `this`. */
   protected[hive] lazy val hiveWarehouse = new Warehouse(hive.hiveconf)
 
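For context on the locking convention stated in the doc comment above (`Usages should lock on `this``): below is a minimal sketch of a conforming caller, assuming Spark SQL and the Hive metastore classes are on the classpath. The `WarehouseAccessExample` object and `warehouseRoot` helper are hypothetical illustrations, not part of this patch; `Warehouse.getWhRoot()` is the Hive metastore accessor for the warehouse root path.

package org.apache.spark.sql.hive

import org.apache.hadoop.fs.Path

// Hypothetical caller (not part of this patch) showing the convention from the
// doc comment: accesses to hiveWarehouse synchronize on the catalog instance.
object WarehouseAccessExample {
  def warehouseRoot(catalog: HiveMetastoreCatalog): Path =
    catalog.synchronized {
      // The lazy val is initialized on first access, which here happens under
      // the lock, satisfying the "Usages should lock on `this`" contract.
      catalog.hiveWarehouse.getWhRoot()
    }
}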