From c4589286f534c6e720954c0433903643c73d201e Mon Sep 17 00:00:00 2001 From: witgo Date: Fri, 9 May 2014 11:16:50 +0800 Subject: [PATCH 1/9] update scalatest to version 2.1.5 --- .../scala/org/apache/spark/ContextCleanerSuite.scala | 6 +++--- .../scala/org/apache/spark/ShuffleNettySuite.scala | 4 ++-- .../test/scala/org/apache/spark/rdd/RDDSuite.scala | 5 +++-- .../apache/spark/scheduler/DAGSchedulerSuite.scala | 4 ++-- pom.xml | 4 ++-- project/SparkBuild.scala | 4 ++-- .../apache/spark/sql/parquet/ParquetQuerySuite.scala | 4 ++-- .../spark/streaming/BasicOperationsSuite.scala | 12 ++++++------ 8 files changed, 22 insertions(+), 21 deletions(-) diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala index 5a8310090890d..dc2db66df60e0 100644 --- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala +++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala @@ -25,7 +25,7 @@ import scala.language.postfixOps import scala.util.Random import org.scalatest.{BeforeAndAfter, FunSuite} -import org.scalatest.concurrent.Eventually +import org.scalatest.concurrent.{PatienceConfiguration, Eventually} import org.scalatest.concurrent.Eventually._ import org.scalatest.time.SpanSugar._ @@ -76,7 +76,7 @@ class ContextCleanerSuite extends FunSuite with BeforeAndAfter with LocalSparkCo tester.assertCleanup() // Verify that shuffles can be re-executed after cleaning up - assert(rdd.collect().toList === collected) + assert(rdd.collect().toList.equals(collected)) } test("cleanup broadcast") { @@ -285,7 +285,7 @@ class CleanerTester( sc.cleaner.get.attachListener(cleanerListener) /** Assert that all the stuff has been cleaned up */ - def assertCleanup()(implicit waitTimeout: Eventually.Timeout) { + def assertCleanup()(implicit waitTimeout: PatienceConfiguration.Timeout) { try { eventually(waitTimeout, interval(100 millis)) { assert(isAllCleanedUp) diff --git a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala index 29d428aa7dc41..47df00050c1e2 100644 --- a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala +++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala @@ -23,11 +23,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll { // This test suite should run all tests in ShuffleSuite with Netty shuffle mode. 
- override def beforeAll(configMap: Map[String, Any]) { + override def beforeAll() { System.setProperty("spark.shuffle.use.netty", "true") } - override def afterAll(configMap: Map[String, Any]) { + override def afterAll() { System.setProperty("spark.shuffle.use.netty", "false") } } diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala index 8da9a0da700e0..bfd4e55cbcac1 100644 --- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala @@ -235,8 +235,9 @@ class RDDSuite extends FunSuite with SharedSparkContext { // we can optionally shuffle to keep the upstream parallel val coalesced5 = data.coalesce(1, shuffle = true) - assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] != - null) + val bool = coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] != + null + assert(bool) // when shuffling, we can increase the number of partitions val coalesced6 = data.coalesce(20, shuffle = true) diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala index d172dd1ac8e1b..7e901f8e91588 100644 --- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala +++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala @@ -23,7 +23,7 @@ import scala.language.reflectiveCalls import akka.actor._ import akka.testkit.{ImplicitSender, TestKit, TestActorRef} -import org.scalatest.{BeforeAndAfter, FunSuite} +import org.scalatest.{BeforeAndAfter, FunSuiteLike} import org.apache.spark._ import org.apache.spark.rdd.RDD @@ -37,7 +37,7 @@ class BuggyDAGEventProcessActor extends Actor { } } -class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuite +class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuiteLike with ImplicitSender with BeforeAndAfter with LocalSparkContext { val conf = new SparkConf diff --git a/pom.xml b/pom.xml index fb904e868cfaf..18e39f0a0e37a 100644 --- a/pom.xml +++ b/pom.xml @@ -447,7 +447,7 @@ org.scalatest scalatest_${scala.binary.version} - 1.9.1 + 2.1.5 test @@ -465,7 +465,7 @@ org.scalacheck scalacheck_${scala.binary.version} - 1.10.0 + 1.11.3 test diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 7f9746ec4acc0..2e18188e58f1f 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -271,8 +271,8 @@ object SparkBuild extends Build { "org.eclipse.jetty" % "jetty-util" % jettyVersion, "org.eclipse.jetty" % "jetty-plus" % jettyVersion, "org.eclipse.jetty" % "jetty-security" % jettyVersion, - "org.scalatest" %% "scalatest" % "1.9.1" % "test", - "org.scalacheck" %% "scalacheck" % "1.10.0" % "test", + "org.scalatest" %% "scalatest" % "2.1.5" % "test", + "org.scalacheck" %% "scalacheck" % "1.11.3" % "test", "com.novocode" % "junit-interface" % "0.10" % "test", "org.easymock" % "easymock" % "3.1" % "test", "org.mockito" % "mockito-all" % "1.8.5" % "test" diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala index d9c9b9a076ab9..4a7716a448ca9 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala @@ -19,7 +19,7 @@ package 
org.apache.spark.sql.parquet import java.io.File -import org.scalatest.{BeforeAndAfterAll, FunSuite} +import org.scalatest.{BeforeAndAfterAll, FunSuiteLike} import org.apache.hadoop.fs.{Path, FileSystem} import org.apache.hadoop.mapreduce.Job @@ -42,7 +42,7 @@ import org.apache.spark.sql.test.TestSQLContext._ case class TestRDDEntry(key: Int, value: String) -class ParquetQuerySuite extends QueryTest with FunSuite with BeforeAndAfterAll { +class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll { import TestData._ TestData // Load test data tables. diff --git a/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala index 04925886c39e4..ff6d86c8f81ac 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala @@ -92,9 +92,9 @@ class BasicOperationsSuite extends TestSuiteBase { assert(second.size === 5) assert(third.size === 5) - assert(first.flatten.toSet === (1 to 100).toSet) - assert(second.flatten.toSet === (101 to 200).toSet) - assert(third.flatten.toSet === (201 to 300).toSet) + assert(first.flatten.toSet.equals((1 to 100).toSet) ) + assert(second.flatten.toSet.equals((101 to 200).toSet)) + assert(third.flatten.toSet.equals((201 to 300).toSet)) } test("repartition (fewer partitions)") { @@ -111,9 +111,9 @@ class BasicOperationsSuite extends TestSuiteBase { assert(second.size === 2) assert(third.size === 2) - assert(first.flatten.toSet === (1 to 100).toSet) - assert(second.flatten.toSet === (101 to 200).toSet) - assert(third.flatten.toSet === (201 to 300).toSet) + assert(first.flatten.toSet.equals((1 to 100).toSet)) + assert(second.flatten.toSet.equals( (101 to 200).toSet)) + assert(third.flatten.toSet.equals((201 to 300).toSet)) } test("groupByKey") { From 2c543b93fb3eb67b0e88e8fdeb5380731e68651c Mon Sep 17 00:00:00 2001 From: witgo Date: Fri, 9 May 2014 13:27:23 +0800 Subject: [PATCH 2/9] fix ReplSuite.scala --- repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala index 566d96e16ed83..b5eb6b4a01de8 100644 --- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala +++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala @@ -56,12 +56,14 @@ class ReplSuite extends FunSuite { } def assertContains(message: String, output: String) { - assert(output.contains(message), + val isContain=output.contains(message) + assert(isContain, "Interpreter output did not contain '" + message + "':\n" + output) } def assertDoesNotContain(message: String, output: String) { - assert(!output.contains(message), + val isContain=output.contains(message) + assert(!isContain, "Interpreter output contained '" + message + "':\n" + output) } From 046540d983f341b3f01c1369fce9572db4e00428 Mon Sep 17 00:00:00 2001 From: witgo Date: Fri, 9 May 2014 16:31:52 +0800 Subject: [PATCH 3/9] fix RDDSuite.scala --- core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala index bfd4e55cbcac1..5544a143291a4 100644 --- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala +++ 
b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala @@ -235,9 +235,8 @@ class RDDSuite extends FunSuite with SharedSparkContext { // we can optionally shuffle to keep the upstream parallel val coalesced5 = data.coalesce(1, shuffle = true) - val bool = coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] != - null - assert(bool) + assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd. + asInstanceOf[ShuffledRDD[Int, Int, (Int, Int)]] !== null ) // when shuffling, we can increase the number of partitions val coalesced6 = data.coalesce(20, shuffle = true) From a52c0fa8dd942a0267bc674b5c7b165dbe49d4c6 Mon Sep 17 00:00:00 2001 From: witgo Date: Sat, 10 May 2014 00:50:33 +0800 Subject: [PATCH 4/9] fix test dependency --- core/pom.xml | 2 +- mllib/pom.xml | 6 ++++++ pom.xml | 11 +++++++++-- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/core/pom.xml b/core/pom.xml index 8fe215ab24289..2a18a2ec0f764 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -234,7 +234,7 @@ org.easymock - easymock + easymockclassextension test diff --git a/mllib/pom.xml b/mllib/pom.xml index cdd33dbb7970d..27dd536b6bc43 100644 --- a/mllib/pom.xml +++ b/mllib/pom.xml @@ -50,6 +50,12 @@ org.scalanlp breeze_${scala.binary.version} 0.7 + + + junit + junit + + org.scalatest diff --git a/pom.xml b/pom.xml index 18e39f0a0e37a..d7a72a34359dd 100644 --- a/pom.xml +++ b/pom.xml @@ -452,14 +452,14 @@ org.easymock - easymock + easymockclassextension 3.1 test org.mockito mockito-all - 1.8.5 + 1.9.0 test @@ -468,6 +468,12 @@ 1.11.3 test + + junit + junit + 4.10 + test + com.novocode junit-interface @@ -733,6 +739,7 @@ -unchecked -deprecation -feature + -language:postfixOps -Xms1024m From 022a7a2176502d5f4e1c998540e457154d5115d5 Mon Sep 17 00:00:00 2001 From: witgo Date: Sat, 10 May 2014 01:00:51 +0800 Subject: [PATCH 5/9] fix test dependency --- project/SparkBuild.scala | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 2e18188e58f1f..ebb0656cfd1d3 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -266,16 +266,17 @@ object SparkBuild extends Build { */ libraryDependencies ++= Seq( - "io.netty" % "netty-all" % "4.0.17.Final", - "org.eclipse.jetty" % "jetty-server" % jettyVersion, - "org.eclipse.jetty" % "jetty-util" % jettyVersion, - "org.eclipse.jetty" % "jetty-plus" % jettyVersion, - "org.eclipse.jetty" % "jetty-security" % jettyVersion, - "org.scalatest" %% "scalatest" % "2.1.5" % "test", - "org.scalacheck" %% "scalacheck" % "1.11.3" % "test", - "com.novocode" % "junit-interface" % "0.10" % "test", - "org.easymock" % "easymock" % "3.1" % "test", - "org.mockito" % "mockito-all" % "1.8.5" % "test" + "io.netty" % "netty-all" % "4.0.17.Final", + "org.eclipse.jetty" % "jetty-server" % jettyVersion, + "org.eclipse.jetty" % "jetty-util" % jettyVersion, + "org.eclipse.jetty" % "jetty-plus" % jettyVersion, + "org.eclipse.jetty" % "jetty-security" % jettyVersion, + "org.scalatest" %% "scalatest" % "2.1.5" % "test", + "org.scalacheck" %% "scalacheck" % "1.11.3" % "test", + "com.novocode" % "junit-interface" % "0.10" % "test", + "org.easymock" % "easymockclassextension" % "3.1" % "test", + "org.mockito" % "mockito-all" % "1.9.0" % "test", + "junit" % "junit" % "4.10" % "test" ), testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"), @@ -316,6 +317,7 @@ object SparkBuild extends Build { val excludeFastutil = ExclusionRule(organization = "it.unimi.dsi") 
val excludeJruby = ExclusionRule(organization = "org.jruby") val excludeThrift = ExclusionRule(organization = "org.apache.thrift") + val excludeJunit = ExclusionRule(organization = "junit") def sparkPreviousArtifact(id: String, organization: String = "org.apache.spark", version: String = "0.9.0-incubating", crossVersion: String = "2.10"): Option[sbt.ModuleID] = { @@ -458,7 +460,7 @@ object SparkBuild extends Build { previousArtifact := sparkPreviousArtifact("spark-mllib"), libraryDependencies ++= Seq( "org.jblas" % "jblas" % jblasVersion, - "org.scalanlp" %% "breeze" % "0.7" + "org.scalanlp" %% "breeze" % "0.7" excludeAll(excludeJunit) ) ) @@ -469,7 +471,6 @@ object SparkBuild extends Build { // this non-deterministically. TODO: FIX THIS. parallelExecution in Test := false, libraryDependencies ++= Seq( - "org.scalatest" %% "scalatest" % "1.9.1" % "test", "com.typesafe" %% "scalalogging-slf4j" % "1.0.1" ) ) From 29b9194e66bbcb4e7fec2cb912f17317ecc21ba9 Mon Sep 17 00:00:00 2001 From: witgo Date: Fri, 16 May 2014 13:15:07 +0800 Subject: [PATCH 6/9] fix code style --- repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala index b5eb6b4a01de8..27c8e6c5aa994 100644 --- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala +++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala @@ -56,13 +56,13 @@ class ReplSuite extends FunSuite { } def assertContains(message: String, output: String) { - val isContain=output.contains(message) + val isContain = output.contains(message) assert(isContain, "Interpreter output did not contain '" + message + "':\n" + output) } def assertDoesNotContain(message: String, output: String) { - val isContain=output.contains(message) + val isContain = output.contains(message) assert(!isContain, "Interpreter output contained '" + message + "':\n" + output) } From 9dfa4e77442ef8c92c0f9d582594c4b5e7a2a574 Mon Sep 17 00:00:00 2001 From: witgo Date: Fri, 16 May 2014 14:00:55 +0800 Subject: [PATCH 7/9] merge bug --- .../org/apache/spark/sql/parquet/ParquetQuerySuite.scala | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala index cd58e5c0e675a..341d55e21c9a9 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala @@ -42,9 +42,6 @@ import org.apache.spark.sql.test.TestSQLContext._ case class TestRDDEntry(key: Int, value: String) -<<<<<<< HEAD -class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll { -======= case class NullReflectData( intField: java.lang.Integer, longField: java.lang.Long, @@ -59,8 +56,7 @@ case class OptionalReflectData( doubleField: Option[Double], booleanField: Option[Boolean]) -class ParquetQuerySuite extends QueryTest with FunSuite with BeforeAndAfterAll { ->>>>>>> a20fea98811d98958567780815fcf0d4fb4e28d4 +class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll { import TestData._ TestData // Load test data tables. 
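Note on the recurring FunSuite -> FunSuiteLike change (patches 1 and 7): in ScalaTest 2.x the style traits such as FunSuite became abstract classes backed by *Like traits, so a suite that already extends another class (TestKit, QueryTest) can no longer extend FunSuite and must mix in FunSuiteLike instead. A minimal sketch of the pattern follows; the suite name and test body are illustrative only and not part of the patches above:

    import akka.actor.ActorSystem
    import akka.testkit.TestKit
    import org.scalatest.FunSuiteLike

    // TestKit is a class; in ScalaTest 2.x FunSuite is also a class, so the
    // two cannot both be extended. Mixing in the FunSuiteLike trait compiles.
    class ExampleActorSuite extends TestKit(ActorSystem("ExampleActorSuite"))
      with FunSuiteLike {
      test("the test actor system is alive") {
        assert(!system.isTerminated)
      }
    }
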
From 996d6f9aed4a8a6b617d9beb3a04319f809fc974 Mon Sep 17 00:00:00 2001 From: witgo Date: Fri, 16 May 2014 15:11:04 +0800 Subject: [PATCH 8/9] fix TimeStampedWeakValueHashMap test --- .../scala/org/apache/spark/util/TimeStampedHashMapSuite.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala index 6a5653ed2fb54..c1c605cdb487b 100644 --- a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala +++ b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala @@ -105,7 +105,8 @@ class TimeStampedHashMapSuite extends FunSuite { map("k1") = strongRef map("k2") = "v2" map("k3") = "v3" - assert(map("k1") === strongRef) + val isEquals = map("k1") == strongRef + assert(isEquals) // clear strong reference to "k1" strongRef = null From 377147424f0f46d7c7532c8127726dc3c9546395 Mon Sep 17 00:00:00 2001 From: witgo Date: Fri, 16 May 2014 15:18:11 +0800 Subject: [PATCH 9/9] fix RDDSuite --- core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala index cf1d49eb85bc8..7c3e5fc9e293d 100644 --- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala @@ -268,8 +268,9 @@ class RDDSuite extends FunSuite with SharedSparkContext { // we can optionally shuffle to keep the upstream parallel val coalesced5 = data.coalesce(1, shuffle = true) - assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd. - asInstanceOf[ShuffledRDD[Int, Int, (Int, Int)]] !== null ) + val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd. + asInstanceOf[ShuffledRDD[_, _, _]] != null + assert(isEquals) // when shuffling, we can increase the number of partitions val coalesced6 = data.coalesce(20, shuffle = true)
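
Note on the remaining ScalaTest 2.x API breaks handled in patch 1: BeforeAndAfterAll no longer passes a config map to beforeAll/afterAll (suites that still need one are expected to mix in the separate BeforeAndAfterAllConfigMap trait instead), which is why ShuffleNettySuite's overrides become parameterless, and Eventually's Timeout type now lives in PatienceConfiguration, which is why ContextCleanerSuite imports PatienceConfiguration.Timeout. A minimal sketch of the parameterless form, with a hypothetical suite name; the property values mirror ShuffleNettySuite above:

    import org.scalatest.{BeforeAndAfterAll, FunSuite}

    // In ScalaTest 2.x, beforeAll()/afterAll() take no arguments; the old
    // beforeAll(configMap) signatures from 1.x no longer override anything.
    class NettyFlagSuite extends FunSuite with BeforeAndAfterAll {
      override def beforeAll() {
        System.setProperty("spark.shuffle.use.netty", "true")
      }

      override def afterAll() {
        System.setProperty("spark.shuffle.use.netty", "false")
      }

      test("netty shuffle flag is set") {
        assert(System.getProperty("spark.shuffle.use.netty") === "true")
      }
    }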