Skip to content

Commit af512c7

Browse files
committed
fix conflicts
1 parent 4ef1be7 commit af512c7

File tree

1 file changed: +5 additions, -16 deletions

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala

Lines changed: 5 additions & 16 deletions
Original file line number | Diff line number | Diff line change
@@ -17,36 +17,27 @@
1717

1818
package org.apache.spark.sql.hive
1919

20-
import java.io.IOException
21-
import java.util.{List => JList}
22-
2320
import com.google.common.base.Objects
2421
import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
2522

2623
import org.apache.hadoop.fs.Path
27-
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Partition => TPartition, Table => TTable}
28-
import org.apache.hadoop.hive.metastore.{TableType, Warehouse}
24+
import org.apache.hadoop.hive.metastore.api.FieldSchema
25+
import org.apache.hadoop.hive.metastore.Warehouse
2926
import org.apache.hadoop.hive.ql.metadata._
30-
import org.apache.hadoop.hive.ql.plan.CreateTableDesc
31-
import org.apache.hadoop.hive.serde.serdeConstants
32-
import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
33-
import org.apache.hadoop.hive.serde2.{Deserializer, SerDeException}
34-
import org.apache.hadoop.util.ReflectionUtils
27+
import org.apache.hadoop.hive.serde2.Deserializer
3528

3629
import org.apache.spark.Logging
37-
import org.apache.spark.sql.hive.client.IsolatedClientLoader
3830
import org.apache.spark.sql.{SaveMode, AnalysisException, SQLContext}
39-
import org.apache.spark.sql.catalyst.analysis.{MultiInstanceRelation, NoSuchTableException, Catalog, OverrideCatalog}
31+
import org.apache.spark.sql.catalyst.analysis.{MultiInstanceRelation, Catalog, OverrideCatalog}
4032
import org.apache.spark.sql.catalyst.expressions._
4133
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
4234
import org.apache.spark.sql.catalyst.plans.logical
4335
import org.apache.spark.sql.catalyst.plans.logical._
4436
import org.apache.spark.sql.catalyst.rules._
4537
import org.apache.spark.sql.hive.client._
4638
import org.apache.spark.sql.parquet.{ParquetRelation2, Partition => ParquetPartition, PartitionSpec}
47-
import org.apache.spark.sql.sources.{CreateTableUsingAsSelect, DDLParser, LogicalRelation, ResolvedDataSource}
39+
import org.apache.spark.sql.sources.{CreateTableUsingAsSelect, LogicalRelation, ResolvedDataSource}
4840
import org.apache.spark.sql.types._
49-
import org.apache.spark.sql.catalyst.CatalystConf
5041
import org.apache.spark.util.Utils
5142

5243
/* Implicit conversions */
@@ -55,8 +46,6 @@ import scala.collection.JavaConversions._
5546
private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: HiveContext)
5647
extends Catalog with Logging {
5748

58-
import org.apache.spark.sql.hive.HiveMetastoreTypes._
59-
6049
/** Usages should lock on `this`. */
6150
protected[hive] lazy val hiveWarehouse = new Warehouse(hive.hiveconf)
6251

0 commit comments

Comments (0)