Commit aa2cb2e

Decouples ParquetTest and TestSQLContext
1 parent 7b43a68 commit aa2cb2e

5 files changed, +20 -5 lines changed

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTest.scala

Lines changed: 7 additions & 4 deletions
@@ -23,9 +23,8 @@ import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.Try
 
-import org.apache.spark.sql.SchemaRDD
+import org.apache.spark.sql.{SQLContext, SchemaRDD}
 import org.apache.spark.sql.catalyst.util
-import org.apache.spark.sql.test.TestSQLContext._
 import org.apache.spark.util.Utils
 
 /**
@@ -36,7 +35,11 @@ import org.apache.spark.util.Utils
  * Especially, `Tuple1.apply` can be used to easily wrap a single type/value.
  */
 trait ParquetTest {
-  protected val configuration = sparkContext.hadoopConfiguration
+  val sqlContext: SQLContext
+
+  import sqlContext._
+
+  protected def configuration = sparkContext.hadoopConfiguration
 
   /**
    * Sets all SQL configurations specified in `pairs`, calls `f`, and then restore all SQL
@@ -86,7 +89,7 @@ trait ParquetTest {
       (data: Seq[T])
       (f: String => Unit): Unit = {
     withTempPath { file =>
-      sparkContext.parallelize(data).toSchemaRDD.saveAsParquetFile(file.getCanonicalPath)
+      sparkContext.parallelize(data).saveAsParquetFile(file.getCanonicalPath)
       f(file.getCanonicalPath)
     }
   }
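
The heart of the commit is in this file: `ParquetTest` no longer imports the `TestSQLContext._` singleton at the top level. Instead it declares an abstract `sqlContext` member and imports from that member, so each concrete suite chooses the context its helpers run against. Note also that `configuration` becomes a `def`: as a `val` it would be evaluated during trait construction, before the mixing-in suite has initialized `sqlContext`. A minimal sketch of the pattern, with an illustrative trait name:

import org.apache.spark.sql.SQLContext

trait ContextBackedTest {
  // Abstract: every concrete suite supplies the context to test against.
  val sqlContext: SQLContext

  // Importing from the member (rather than from a global singleton) brings
  // its methods and implicits, e.g. sparkContext and sql, into scope.
  import sqlContext._

  // A def is evaluated on each call, after sqlContext has been initialized;
  // a val here would dereference a still-uninitialized sqlContext.
  protected def configuration = sparkContext.hadoopConfiguration
}

A suite then wires in the shared test context with a single line, `val sqlContext = TestSQLContext`, which is exactly what the four suites below do.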

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetFilterSuite.scala

Lines changed: 4 additions & 1 deletion
@@ -22,6 +22,7 @@ import parquet.filter2.predicate.{FilterPredicate, Operators}
 
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions.{Literal, Predicate, Row}
+import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.{QueryTest, SQLConf, SchemaRDD}
 
 /**
@@ -34,6 +35,8 @@ import org.apache.spark.sql.{QueryTest, SQLConf, SchemaRDD}
  * @todo Add test cases for `IsNull` and `IsNotNull` after merging PR #3367
  */
 class ParquetFilterSuite extends QueryTest with ParquetTest {
+  val sqlContext = TestSQLContext
+
   private def checkFilterPushdown(
       rdd: SchemaRDD,
       output: Seq[Symbol],
@@ -76,7 +79,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
        case s: Seq[_] => s.map(_.asInstanceOf[Row].getAs[Array[Byte]](0).mkString(","))
        case s => Seq(s.asInstanceOf[Array[Byte]].mkString(","))
      }
-     assert(actual.sameElements(expected))
+     assert(actual === expected)
    }
    checkFilterPushdown(rdd, output, predicate, filterClass, checkBinaryAnswer _, expectedResult)
  }
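
One behavioral tweak rides along in this suite: the binary answer check now uses ScalaTest's `===` instead of `sameElements`. Both compare the sequences element by element, but `===` reports the two operands on failure, while `sameElements` collapses to a bare Boolean and yields an unhelpful "false was not true". A small illustration with made-up values:

// Hypothetical values, just to show the failure messages.
val actual = Seq("1,2", "3,4")
val expected = Seq("1,2", "9,9")

// Fails with only: "false was not true"
assert(actual.sameElements(expected))

// Fails with something like: List(1,2, 3,4) did not equal List(1,2, 9,9)
assert(actual === expected)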

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetIOSuite.scala

Lines changed: 3 additions & 0 deletions
@@ -35,6 +35,7 @@ import parquet.schema.{MessageType, MessageTypeParser}
 import org.apache.spark.sql.catalyst.ScalaReflection
 import org.apache.spark.sql.catalyst.expressions.Row
 import org.apache.spark.sql.catalyst.types.DecimalType
+import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.test.TestSQLContext._
 import org.apache.spark.sql.{QueryTest, SQLConf, SchemaRDD}
 
@@ -62,6 +63,8 @@ private[parquet] class TestGroupWriteSupport(schema: MessageType) extends WriteS
  * A test suite that tests basic Parquet I/O.
  */
 class ParquetIOSuite extends QueryTest with ParquetTest {
+  val sqlContext = TestSQLContext
+
   /**
    * Writes `data` to a Parquet file, reads it back and check file contents.
    */

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite2.scala

Lines changed: 3 additions & 0 deletions
@@ -19,12 +19,15 @@ package org.apache.spark.sql.parquet
 
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.catalyst.expressions.Row
+import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.test.TestSQLContext._
 
 /**
  * A test suite that tests various Parquet queries.
  */
 class ParquetQuerySuite2 extends QueryTest with ParquetTest {
+  val sqlContext = TestSQLContext
+
   test("simple projection") {
     withParquetTable((0 until 10).map(i => (i, i.toString)), "t") {
       checkAnswer(sql("SELECT _1 FROM t"), (0 until 10).map(Row.apply(_)))

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetSchemaSuite.scala

Lines changed: 3 additions & 0 deletions
@@ -25,8 +25,11 @@ import parquet.schema.MessageTypeParser
 
 import org.apache.spark.sql.catalyst.ScalaReflection
 import org.apache.spark.sql.catalyst.types.{BinaryType, IntegerType, StructField, StructType}
+import org.apache.spark.sql.test.TestSQLContext
 
 class ParquetSchemaSuite extends FunSuite with ParquetTest {
+  val sqlContext = TestSQLContext
+
   /**
    * Checks whether the reflected Parquet message type for product type `T` conforms `messageType`.
    */
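
With `sqlContext` supplied explicitly by every suite, the helpers are no longer tied to the shared `TestSQLContext` singleton, which is the point of the decoupling. A hypothetical suite could run the same helpers against a context of its own; everything below except `SQLContext`, `SparkContext`, `FunSuite`, and `ParquetTest` is an illustrative name:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.scalatest.FunSuite

// Exercises the ParquetTest helpers against a private, locally created
// context instead of the global TestSQLContext.
class IsolatedParquetSuite extends FunSuite with ParquetTest {
  val sqlContext = new SQLContext(
    new SparkContext("local[2]", "isolated-parquet-suite", new SparkConf()))
}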
