Skip to content

Commit f5c22b0

Browse files
committed
Refactor code and update test cases.
1 parent f1cffe4 commit f5c22b0

File tree

6 files changed: +201 additions, −184 deletions

sql/core/src/main/scala/org/apache/spark/sql/json/JSONRelation.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ private[sql] class DefaultSource extends SchemaRelationProvider {
2626
override def createRelation(
2727
sqlContext: SQLContext,
2828
parameters: Map[String, String],
29-
schema: Option[StructType] = None): BaseRelation = {
29+
schema: Option[StructType]): BaseRelation = {
3030
val fileName = parameters.getOrElse("path", sys.error("Option 'path' not specified"))
3131
val samplingRatio = parameters.get("samplingRatio").map(_.toDouble).getOrElse(1.0)
3232

sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ class DefaultSource extends SchemaRelationProvider {
4848
override def createRelation(
4949
sqlContext: SQLContext,
5050
parameters: Map[String, String],
51-
schema: Option[StructType] = None): BaseRelation = {
51+
schema: Option[StructType]): BaseRelation = {
5252
val path =
5353
parameters.getOrElse("path", sys.error("'path' must be specified for parquet tables."))
5454

sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala

Lines changed: 11 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -64,14 +64,14 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
6464

6565
// Data types.
6666
protected val STRING = Keyword("STRING")
67-
protected val FLOAT = Keyword("FLOAT")
68-
protected val INT = Keyword("INT")
67+
protected val BINARY = Keyword("BINARY")
68+
protected val BOOLEAN = Keyword("BOOLEAN")
6969
protected val TINYINT = Keyword("TINYINT")
7070
protected val SMALLINT = Keyword("SMALLINT")
71-
protected val DOUBLE = Keyword("DOUBLE")
71+
protected val INT = Keyword("INT")
7272
protected val BIGINT = Keyword("BIGINT")
73-
protected val BINARY = Keyword("BINARY")
74-
protected val BOOLEAN = Keyword("BOOLEAN")
73+
protected val FLOAT = Keyword("FLOAT")
74+
protected val DOUBLE = Keyword("DOUBLE")
7575
protected val DECIMAL = Keyword("DECIMAL")
7676
protected val DATE = Keyword("DATE")
7777
protected val TIMESTAMP = Keyword("TIMESTAMP")
@@ -105,8 +105,8 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
105105
CREATE ~ TEMPORARY ~ TABLE ~> ident
106106
~ (tableCols).? ~ (USING ~> className) ~ (OPTIONS ~> options) ^^ {
107107
case tableName ~ columns ~ provider ~ opts =>
108-
val tblColumns = if(columns.isEmpty) Seq.empty else columns.get
109-
CreateTableUsing(tableName, tblColumns, provider, opts)
108+
val userSpecifiedSchema = columns.flatMap(fields => Some(StructType(fields)))
109+
CreateTableUsing(tableName, userSpecifiedSchema, provider, opts)
110110
}
111111
)
112112

@@ -184,7 +184,7 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
184184

185185
private[sql] case class CreateTableUsing(
186186
tableName: String,
187-
tableCols: Seq[StructField],
187+
userSpecifiedSchema: Option[StructType],
188188
provider: String,
189189
options: Map[String, String]) extends RunnableCommand {
190190

@@ -203,16 +203,9 @@ private[sql] case class CreateTableUsing(
203203
.asInstanceOf[org.apache.spark.sql.sources.RelationProvider]
204204
.createRelation(sqlContext, new CaseInsensitiveMap(options))
205205
case dataSource: org.apache.spark.sql.sources.SchemaRelationProvider =>
206-
if(tableCols.isEmpty) {
207-
dataSource
208-
.asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
209-
.createRelation(sqlContext, new CaseInsensitiveMap(options))
210-
} else {
211-
dataSource
212-
.asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
213-
.createRelation(
214-
sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
215-
}
206+
dataSource
207+
.asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
208+
.createRelation(sqlContext, new CaseInsensitiveMap(options), userSpecifiedSchema)
216209
}
217210

218211
sqlContext.baseRelationToSchemaRDD(relation).registerTempTable(tableName)

sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ trait SchemaRelationProvider {
6868
def createRelation(
6969
sqlContext: SQLContext,
7070
parameters: Map[String, String],
71-
schema: Option[StructType] = None): BaseRelation
71+
schema: Option[StructType]): BaseRelation
7272
}
7373

7474
/**

sql/core/src/test/scala/org/apache/spark/sql/sources/NewTableScanSuite.scala

Lines changed: 0 additions & 163 deletions
This file was deleted.

0 commit comments

Comments (0)