diff --git a/core/pom.xml b/core/pom.xml
index 0c746175afa73..c3d6b00a443f1 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -235,7 +235,7 @@
     </dependency>
     <dependency>
       <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <artifactId>easymockclassextension</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
index 5a8310090890d..dc2db66df60e0 100644
--- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -25,7 +25,7 @@ import scala.language.postfixOps
import scala.util.Random
import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.concurrent.Eventually
+import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
@@ -76,7 +76,7 @@ class ContextCleanerSuite extends FunSuite with BeforeAndAfter with LocalSparkCo
tester.assertCleanup()
// Verify that shuffles can be re-executed after cleaning up
- assert(rdd.collect().toList === collected)
+ assert(rdd.collect().toList.equals(collected))
}
test("cleanup broadcast") {
@@ -285,7 +285,7 @@ class CleanerTester(
sc.cleaner.get.attachListener(cleanerListener)
/** Assert that all the stuff has been cleaned up */
- def assertCleanup()(implicit waitTimeout: Eventually.Timeout) {
+ def assertCleanup()(implicit waitTimeout: PatienceConfiguration.Timeout) {
try {
eventually(waitTimeout, interval(100 millis)) {
assert(isAllCleanedUp)
diff --git a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index 29d428aa7dc41..47df00050c1e2 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -23,11 +23,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {
// This test suite should run all tests in ShuffleSuite with Netty shuffle mode.
- override def beforeAll(configMap: Map[String, Any]) {
+ override def beforeAll() {
System.setProperty("spark.shuffle.use.netty", "true")
}
- override def afterAll(configMap: Map[String, Any]) {
+ override def afterAll() {
System.setProperty("spark.shuffle.use.netty", "false")
}
}
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index 286e221e33b78..55af1666df662 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -266,8 +266,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {
// we can optionally shuffle to keep the upstream parallel
val coalesced5 = data.coalesce(1, shuffle = true)
- assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
- null)
+ val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd.
+ asInstanceOf[ShuffledRDD[_, _, _]] != null
+ assert(isEquals)
// when shuffling, we can increase the number of partitions
val coalesced6 = data.coalesce(20, shuffle = true)
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index 81e64c1846ed5..7506d56d7e26d 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -23,7 +23,7 @@ import scala.language.reflectiveCalls
import akka.actor._
import akka.testkit.{ImplicitSender, TestKit, TestActorRef}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.{BeforeAndAfter, FunSuiteLike}
import org.apache.spark._
import org.apache.spark.rdd.RDD
@@ -37,7 +37,7 @@ class BuggyDAGEventProcessActor extends Actor {
}
}
-class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuite
+class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuiteLike
with ImplicitSender with BeforeAndAfter with LocalSparkContext {
val conf = new SparkConf
diff --git a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
index 6a5653ed2fb54..c1c605cdb487b 100644
--- a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
@@ -105,7 +105,8 @@ class TimeStampedHashMapSuite extends FunSuite {
map("k1") = strongRef
map("k2") = "v2"
map("k3") = "v3"
- assert(map("k1") === strongRef)
+ val isEquals = map("k1") == strongRef
+ assert(isEquals)
// clear strong reference to "k1"
strongRef = null
diff --git a/pom.xml b/pom.xml
index 87c8e29ad1069..891468b21bfff 100644
--- a/pom.xml
+++ b/pom.xml
@@ -459,25 +459,31 @@
      <dependency>
        <groupId>org.scalatest</groupId>
        <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>1.9.1</version>
+        <version>2.1.5</version>
        <scope>test</scope>
      </dependency>
      <dependency>
        <groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
+        <artifactId>easymockclassextension</artifactId>
        <version>3.1</version>
        <scope>test</scope>
      </dependency>
      <dependency>
        <groupId>org.mockito</groupId>
        <artifactId>mockito-all</artifactId>
-        <version>1.8.5</version>
+        <version>1.9.0</version>
        <scope>test</scope>
      </dependency>
      <dependency>
        <groupId>org.scalacheck</groupId>
        <artifactId>scalacheck_${scala.binary.version}</artifactId>
-        <version>1.10.0</version>
+        <version>1.11.3</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>junit</groupId>
+        <artifactId>junit</artifactId>
+        <version>4.10</version>
        <scope>test</scope>
      </dependency>
      <dependency>
@@ -779,6 +785,7 @@
              <arg>-unchecked</arg>
              <arg>-deprecation</arg>
              <arg>-feature</arg>
+              <arg>-language:postfixOps</arg>
            </args>
            <jvmArgs>
              <jvmArg>-Xms1024m</jvmArg>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d0049a8ac43aa..069913dbaac56 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -270,16 +270,17 @@ object SparkBuild extends Build {
*/
libraryDependencies ++= Seq(
- "io.netty" % "netty-all" % "4.0.17.Final",
- "org.eclipse.jetty" % "jetty-server" % jettyVersion,
- "org.eclipse.jetty" % "jetty-util" % jettyVersion,
- "org.eclipse.jetty" % "jetty-plus" % jettyVersion,
- "org.eclipse.jetty" % "jetty-security" % jettyVersion,
- "org.scalatest" %% "scalatest" % "1.9.1" % "test",
- "org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
- "com.novocode" % "junit-interface" % "0.10" % "test",
- "org.easymock" % "easymock" % "3.1" % "test",
- "org.mockito" % "mockito-all" % "1.8.5" % "test"
+ "io.netty" % "netty-all" % "4.0.17.Final",
+ "org.eclipse.jetty" % "jetty-server" % jettyVersion,
+ "org.eclipse.jetty" % "jetty-util" % jettyVersion,
+ "org.eclipse.jetty" % "jetty-plus" % jettyVersion,
+ "org.eclipse.jetty" % "jetty-security" % jettyVersion,
+ "org.scalatest" %% "scalatest" % "2.1.5" % "test",
+ "org.scalacheck" %% "scalacheck" % "1.11.3" % "test",
+ "com.novocode" % "junit-interface" % "0.10" % "test",
+ "org.easymock" % "easymockclassextension" % "3.1" % "test",
+ "org.mockito" % "mockito-all" % "1.9.0" % "test",
+ "junit" % "junit" % "4.10" % "test"
),
testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
@@ -478,7 +479,6 @@ object SparkBuild extends Build {
// this non-deterministically. TODO: FIX THIS.
parallelExecution in Test := false,
libraryDependencies ++= Seq(
- "org.scalatest" %% "scalatest" % "1.9.1" % "test",
"com.typesafe" %% "scalalogging-slf4j" % "1.0.1"
)
)
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 7c765edd55027..f4ba8d9cc079b 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -67,12 +67,14 @@ class ReplSuite extends FunSuite {
}
def assertContains(message: String, output: String) {
- assert(output.contains(message),
+ val isContain = output.contains(message)
+ assert(isContain,
"Interpreter output did not contain '" + message + "':\n" + output)
}
def assertDoesNotContain(message: String, output: String) {
- assert(!output.contains(message),
+ val isContain = output.contains(message)
+ assert(!isContain,
"Interpreter output contained '" + message + "':\n" + output)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
index b973ceba5fec0..9810520bb9ae6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.parquet
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.{BeforeAndAfterAll, FunSuiteLike}
import org.apache.hadoop.fs.{Path, FileSystem}
import org.apache.hadoop.mapreduce.Job
@@ -56,7 +56,7 @@ case class OptionalReflectData(
doubleField: Option[Double],
booleanField: Option[Boolean])
-class ParquetQuerySuite extends QueryTest with FunSuite with BeforeAndAfterAll {
+class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
import TestData._
TestData // Load test data tables.
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
index 04925886c39e4..ff6d86c8f81ac 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
@@ -92,9 +92,9 @@ class BasicOperationsSuite extends TestSuiteBase {
assert(second.size === 5)
assert(third.size === 5)
- assert(first.flatten.toSet === (1 to 100).toSet)
- assert(second.flatten.toSet === (101 to 200).toSet)
- assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
}
test("repartition (fewer partitions)") {
@@ -111,9 +111,9 @@ class BasicOperationsSuite extends TestSuiteBase {
assert(second.size === 2)
assert(third.size === 2)
- assert(first.flatten.toSet === (1 to 100).toSet)
- assert(second.flatten.toSet === (101 to 200).toSet)
- assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
}
test("groupByKey") {