Skip to content

Commit b621c8f

Browse files
committed
minor refactoring
1 parent d02547f commit b621c8f

File tree

2 files changed

+61
-41
lines changed

2 files changed

+61
-41
lines changed
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.sql.sources
19+
20+
import org.apache.spark.sql.SQLContext
21+
import org.apache.spark.sql.catalyst.types.{StructType, StructField}
22+
import org.apache.spark.sql.execution.RunnableCommand
23+
import org.apache.spark.util.Utils
24+
25+
/**
 * Runnable command that registers a temporary table backed by an external data source.
 *
 * @param tableName name under which the resulting relation is registered as a temp table
 * @param tableCols user-specified schema columns; may be empty, in which case the data
 *                  source is expected to infer or supply its own schema
 * @param provider  fully-qualified class name of the data source, or a package name that
 *                  contains a class named `DefaultSource`
 * @param options   data source options; wrapped in a case-insensitive map before use
 */
private[sql] case class CreateTableUsing(
    tableName: String,
    tableCols: Seq[StructField],
    provider: String,
    options: Map[String, String]) extends RunnableCommand {

  def run(sqlContext: SQLContext) = {
    val loader = Utils.getContextOrSparkClassLoader
    // Resolve the provider class: first try the name as given, then fall back to the
    // "<provider>.DefaultSource" naming convention.
    val clazz: Class[_] = try loader.loadClass(provider) catch {
      case _: ClassNotFoundException =>
        try loader.loadClass(provider + ".DefaultSource") catch {
          case _: ClassNotFoundException =>
            sys.error(s"Failed to load class for data source: $provider")
        }
    }
    // A typed pattern already narrows the scrutinee, so no asInstanceOf is needed.
    val relation = clazz.newInstance() match {
      case dataSource: RelationProvider =>
        dataSource.createRelation(sqlContext, new CaseInsensitiveMap(options))
      case dataSource: SchemaRelationProvider =>
        if (tableCols.isEmpty) {
          dataSource.createRelation(sqlContext, new CaseInsensitiveMap(options))
        } else {
          dataSource.createRelation(
            sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
        }
      case _ =>
        // Without this default case an unsupported provider class threw a bare MatchError.
        sys.error(s"Failed to load class for data source: $provider")
    }

    sqlContext.baseRelationToSchemaRDD(relation).registerTempTable(tableName)
    Seq.empty
  }
}

sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala

Lines changed: 0 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -22,10 +22,7 @@ import scala.util.parsing.combinator.syntactical.StandardTokenParsers
2222
import scala.util.parsing.combinator.PackratParsers
2323

2424
import org.apache.spark.Logging
25-
import org.apache.spark.sql.SQLContext
2625
import org.apache.spark.sql.catalyst.types._
27-
import org.apache.spark.sql.execution.RunnableCommand
28-
import org.apache.spark.util.Utils
2926
import org.apache.spark.sql.catalyst.plans.logical._
3027
import org.apache.spark.sql.catalyst.SqlLexical
3128

@@ -182,44 +179,6 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
182179
}
183180
}
184181

185-
private[sql] case class CreateTableUsing(
186-
tableName: String,
187-
tableCols: Seq[StructField],
188-
provider: String,
189-
options: Map[String, String]) extends RunnableCommand {
190-
191-
def run(sqlContext: SQLContext) = {
192-
val loader = Utils.getContextOrSparkClassLoader
193-
val clazz: Class[_] = try loader.loadClass(provider) catch {
194-
case cnf: java.lang.ClassNotFoundException =>
195-
try loader.loadClass(provider + ".DefaultSource") catch {
196-
case cnf: java.lang.ClassNotFoundException =>
197-
sys.error(s"Failed to load class for data source: $provider")
198-
}
199-
}
200-
val relation = clazz.newInstance match {
201-
case dataSource: org.apache.spark.sql.sources.RelationProvider =>
202-
dataSource
203-
.asInstanceOf[org.apache.spark.sql.sources.RelationProvider]
204-
.createRelation(sqlContext, new CaseInsensitiveMap(options))
205-
case dataSource: org.apache.spark.sql.sources.SchemaRelationProvider =>
206-
if(tableCols.isEmpty) {
207-
dataSource
208-
.asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
209-
.createRelation(sqlContext, new CaseInsensitiveMap(options))
210-
} else {
211-
dataSource
212-
.asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
213-
.createRelation(
214-
sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
215-
}
216-
}
217-
218-
sqlContext.baseRelationToSchemaRDD(relation).registerTempTable(tableName)
219-
Seq.empty
220-
}
221-
}
222-
223182
/**
224183
* Builds a map in which keys are case insensitive
225184
*/

0 commit comments

Comments
 (0)