@@ -64,14 +64,14 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
 
   // Data types.
   protected val STRING = Keyword("STRING")
-  protected val FLOAT = Keyword("FLOAT")
-  protected val INT = Keyword("INT")
+  protected val BINARY = Keyword("BINARY")
+  protected val BOOLEAN = Keyword("BOOLEAN")
   protected val TINYINT = Keyword("TINYINT")
   protected val SMALLINT = Keyword("SMALLINT")
-  protected val DOUBLE = Keyword("DOUBLE")
+  protected val INT = Keyword("INT")
   protected val BIGINT = Keyword("BIGINT")
-  protected val BINARY = Keyword("BINARY")
-  protected val BOOLEAN = Keyword("BOOLEAN")
+  protected val FLOAT = Keyword("FLOAT")
+  protected val DOUBLE = Keyword("DOUBLE")
   protected val DECIMAL = Keyword("DECIMAL")
   protected val DATE = Keyword("DATE")
   protected val TIMESTAMP = Keyword("TIMESTAMP")
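
The reorder above just groups the type keywords more sensibly. For context, these tokens feed a data-type production; a minimal sketch of what such a production could look like inside the same parser class, assuming each keyword maps directly to its Catalyst type (the `primitiveType` name and exact alternatives are assumptions, not copied from this patch; DECIMAL is omitted since it can carry precision and scale):

    // Sketch: each Keyword becomes one alternative yielding a Catalyst DataType.
    protected lazy val primitiveType: Parser[DataType] =
      STRING ^^^ StringType |
      BINARY ^^^ BinaryType |
      BOOLEAN ^^^ BooleanType |
      TINYINT ^^^ ByteType |
      SMALLINT ^^^ ShortType |
      INT ^^^ IntegerType |
      BIGINT ^^^ LongType |
      FLOAT ^^^ FloatType |
      DOUBLE ^^^ DoubleType |
      DATE ^^^ DateType |
      TIMESTAMP ^^^ TimestampType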
@@ -105,8 +105,8 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
     CREATE ~ TEMPORARY ~ TABLE ~> ident
       ~ (tableCols).? ~ (USING ~> className) ~ (OPTIONS ~> options) ^^ {
       case tableName ~ columns ~ provider ~ opts =>
-        val tblColumns = if (columns.isEmpty) Seq.empty else columns.get
-        CreateTableUsing(tableName, tblColumns, provider, opts)
+        val userSpecifiedSchema = columns.flatMap(fields => Some(StructType(fields)))
+        CreateTableUsing(tableName, userSpecifiedSchema, provider, opts)
     }
   )
 
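
The replacement line is a plain Option-to-Option conversion: when the optional column list was parsed, it is wrapped in a `StructType`; when it is absent, the result stays `None`. `flatMap(fields => Some(...))` behaves the same as a simple `map`; a small sketch checking that equivalence, with a placeholder field and an import path assumed from the 1.2-era Catalyst layout:

    import org.apache.spark.sql.catalyst.types.{IntegerType, StructField, StructType}

    val columns: Option[Seq[StructField]] =
      Some(Seq(StructField("a", IntegerType, nullable = true)))
    // Both forms produce Some(StructType(...)) here, and None for an empty Option.
    assert(columns.flatMap(fields => Some(StructType(fields))) == columns.map(StructType(_)))
    assert(Option.empty[Seq[StructField]].flatMap(fields => Some(StructType(fields))).isEmpty)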
@@ -184,7 +184,7 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
 
 private[sql] case class CreateTableUsing(
     tableName: String,
-    tableCols: Seq[StructField],
+    userSpecifiedSchema: Option[StructType],
     provider: String,
     options: Map[String, String]) extends RunnableCommand {
 
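
With the new signature, a missing column list no longer degrades to an empty `Seq`; callers state explicitly whether a schema was given. Hypothetical call sites (table name, provider, and options below are placeholders, not from the patch):

    // Let the data source infer the schema:
    CreateTableUsing("logs", None, "org.example.LogSource", Map("path" -> "/data/logs"))

    // Pin the schema the user declared in the column list:
    val schema = StructType(Seq(StructField("msg", StringType, nullable = true)))
    CreateTableUsing("logs", Some(schema), "org.example.LogSource", Map("path" -> "/data/logs"))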
@@ -203,16 +203,9 @@ private[sql] case class CreateTableUsing(
           .asInstanceOf[org.apache.spark.sql.sources.RelationProvider]
           .createRelation(sqlContext, new CaseInsensitiveMap(options))
       case dataSource: org.apache.spark.sql.sources.SchemaRelationProvider =>
-        if (tableCols.isEmpty) {
-          dataSource
-            .asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
-            .createRelation(sqlContext, new CaseInsensitiveMap(options))
-        } else {
-          dataSource
-            .asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
-            .createRelation(
-              sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
-        }
+        dataSource
+          .asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
+          .createRelation(sqlContext, new CaseInsensitiveMap(options), userSpecifiedSchema)
     }
 
     sqlContext.baseRelationToSchemaRDD(relation).registerTempTable(tableName)
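
The deleted if/else existed only to decide whether to pass a schema at all; once `userSpecifiedSchema` is already an `Option[StructType]`, a single call covers both cases. For reference, the `createRelation` shape this call site implies (inferred from the call, not copied from the trait definition):

    trait SchemaRelationProvider {
      // None means "no user-specified schema", so one call replaces both old branches.
      def createRelation(
          sqlContext: SQLContext,
          parameters: Map[String, String],
          schema: Option[StructType]): BaseRelation
    }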