Skip to content

Commit c9d7301

Browse files
committed
organize imports
1 parent 1fcbf13 commit c9d7301

File tree

5 files changed

+14
-19
lines changed

5 files changed

+14
-19
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/dataTypes.scala

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,23 +19,21 @@ package org.apache.spark.sql.catalyst.types
1919

2020
import java.sql.Timestamp
2121

22-
import org.apache.spark.sql.catalyst.util.Metadata
23-
2422
import scala.math.Numeric.{BigDecimalAsIfIntegral, DoubleAsIfIntegral, FloatAsIfIntegral}
2523
import scala.reflect.ClassTag
2624
import scala.reflect.runtime.universe.{TypeTag, runtimeMirror, typeTag}
2725
import scala.util.parsing.combinator.RegexParsers
2826

29-
import org.json4s.JsonAST.JValue
3027
import org.json4s._
28+
import org.json4s.JsonAST.JValue
3129
import org.json4s.JsonDSL._
3230
import org.json4s.jackson.JsonMethods._
3331

3432
import org.apache.spark.sql.catalyst.ScalaReflectionLock
3533
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Expression}
34+
import org.apache.spark.sql.catalyst.util.Metadata
3635
import org.apache.spark.util.Utils
3736

38-
3937
object DataType {
4038
def fromJson(json: String): DataType = parseDataType(parse(json))
4139

@@ -380,8 +378,7 @@ case class ArrayType(elementType: DataType, containsNull: Boolean) extends DataT
380378
* @param name The name of this field.
381379
* @param dataType The data type of this field.
382380
* @param nullable Indicates if values of this field can be `null` values.
383-
* @param metadata The metadata of this field, which is a map from string to simple type that can be
384-
* serialized to JSON automatically. The metadata should be preserved during
381+
* @param metadata The metadata of this field. The metadata should be preserved during
385382
* transformation if the content of the column is not modified, e.g., in selection.
386383
*/
387384
case class StructField(

sql/core/src/main/java/org/apache/spark/sql/api/java/DataType.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,10 +17,10 @@
1717

1818
package org.apache.spark.sql.api.java;
1919

20-
import org.apache.spark.sql.catalyst.util.Metadata;
21-
2220
import java.util.*;
2321

22+
import org.apache.spark.sql.catalyst.util.Metadata;
23+
2424
/**
2525
* The base type of all Spark SQL data types.
2626
*

sql/core/src/main/java/org/apache/spark/sql/api/java/StructField.java

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,22 +17,21 @@
1717

1818
package org.apache.spark.sql.api.java;
1919

20-
import org.apache.spark.sql.catalyst.util.Metadata;
21-
2220
import java.util.Map;
2321

22+
import org.apache.spark.sql.catalyst.util.Metadata;
23+
2424
/**
2525
* A StructField object represents a field in a StructType object.
2626
* A StructField object comprises three fields, {@code String name}, {@code DataType dataType},
2727
* and {@code boolean nullable}. The field of {@code name} is the name of a StructField.
2828
* The field of {@code dataType} specifies the data type of a StructField.
2929
* The field of {@code nullable} specifies if values of a StructField can contain {@code null}
3030
* values.
31-
* The field of {@code metadata} provides extra information of the StructField, which is a map from
32-
* string to simple type that can be serialized to JSON automatically
31+
* The field of {@code metadata} provides extra information about the StructField.
3332
*
3433
* To create a {@link StructField},
35-
* {@link DataType#createStructField(String, DataType, boolean, Map)}
34+
* {@link DataType#createStructField(String, DataType, boolean, Metadata)}
3635
* should be used.
3736
*/
3837
public class StructField {

sql/core/src/test/scala/org/apache/spark/sql/DataTypeSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,10 +17,10 @@
1717

1818
package org.apache.spark.sql
1919

20-
import org.apache.spark.sql.catalyst.util.MetadataBuilder
2120
import org.scalatest.FunSuite
2221

2322
import org.apache.spark.sql.catalyst.types.DataType
23+
import org.apache.spark.sql.catalyst.util.MetadataBuilder
2424

2525
class DataTypeSuite extends FunSuite {
2626

sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,12 @@
1717

1818
package org.apache.spark.sql.api.java
1919

20-
import org.apache.spark.sql.catalyst.util.MetadataBuilder
21-
import org.apache.spark.sql.types.util.DataTypeConversions
2220
import org.scalatest.FunSuite
2321

24-
import org.apache.spark.sql.{DataType => SDataType, StructField => SStructField}
25-
import org.apache.spark.sql.{StructType => SStructType}
26-
import DataTypeConversions._
22+
import org.apache.spark.sql.{DataType => SDataType, StructField => SStructField,
23+
StructType => SStructType}
24+
import org.apache.spark.sql.catalyst.util.MetadataBuilder
25+
import org.apache.spark.sql.types.util.DataTypeConversions._
2726

2827
class ScalaSideDataTypeConversionSuite extends FunSuite {
2928

0 commit comments

Comments
 (0)