
Commit c8ad296

witgo authored and conviva-zz committed
[SPARK-1841]: update scalatest to version 2.1.5
Author: witgo <[email protected]>

Closes apache#713 from witgo/scalatest and squashes the following commits:

b627a6a [witgo] merge master
51fb3d6 [witgo] merge master
3771474 [witgo] fix RDDSuite
996d6f9 [witgo] fix TimeStampedWeakValueHashMap test
9dfa4e7 [witgo] merge bug
1479b22 [witgo] merge master
29b9194 [witgo] fix code style
022a7a2 [witgo] fix test dependency
a52c0fa [witgo] fix test dependency
cd8f59d [witgo] Merge branch 'master' of https://github.com/apache/spark into scalatest
046540d [witgo] fix RDDSuite.scala
2c543b9 [witgo] fix ReplSuite.scala
c458928 [witgo] update scalatest to version 2.1.5
1 parent 86f5ab3 commit c8ad296

File tree

11 files changed (+47, -36 lines)

core/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -235,7 +235,7 @@
     </dependency>
     <dependency>
       <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <artifactId>easymockclassextension</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
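
The artifact swap is tied to the ScalaTest upgrade: the tests drive EasyMock through ScalaTest's EasyMockSugar, and the class-extension artifact is what lets mocks cover concrete classes as well as interfaces. A minimal sketch of the usage pattern, with hypothetical Greeter/GreeterSuite names, assuming ScalaTest 2.1.5 and easymockclassextension 3.1 on the test classpath:

    import org.scalatest.FunSuite
    import org.scalatest.mock.EasyMockSugar

    // Hypothetical trait being mocked.
    trait Greeter { def greet(name: String): String }

    // Hypothetical suite: mock[T], expecting, and whenExecuting come
    // from EasyMockSugar; andReturn records the stubbed result.
    class GreeterSuite extends FunSuite with EasyMockSugar {
      test("mocked greeter") {
        val greeter = mock[Greeter]
        expecting {
          greeter.greet("spark").andReturn("hi spark")
        }
        whenExecuting(greeter) {
          assert(greeter.greet("spark") == "hi spark")
        }
      }
    }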

core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -25,7 +25,7 @@ import scala.language.postfixOps
 import scala.util.Random

 import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.concurrent.Eventually
+import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._

@@ -76,7 +76,7 @@ class ContextCleanerSuite extends FunSuite with BeforeAndAfter with LocalSparkCo
     tester.assertCleanup()

     // Verify that shuffles can be re-executed after cleaning up
-    assert(rdd.collect().toList === collected)
+    assert(rdd.collect().toList.equals(collected))
   }

   test("cleanup broadcast") {
@@ -285,7 +285,7 @@ class CleanerTester(
   sc.cleaner.get.attachListener(cleanerListener)

   /** Assert that all the stuff has been cleaned up */
-  def assertCleanup()(implicit waitTimeout: Eventually.Timeout) {
+  def assertCleanup()(implicit waitTimeout: PatienceConfiguration.Timeout) {
     try {
       eventually(waitTimeout, interval(100 millis)) {
         assert(isAllCleanedUp)
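
In ScalaTest 2.x the Timeout type consumed by eventually lives on the PatienceConfiguration trait rather than on Eventually, which is why the implicit parameter above changes owner. A minimal sketch of the new signature, with a hypothetical waitUntilTrue helper mirroring assertCleanup(), assuming only ScalaTest 2.1.5:

    import org.scalatest.Assertions._
    import org.scalatest.concurrent.Eventually._
    import org.scalatest.concurrent.PatienceConfiguration
    import org.scalatest.time.SpanSugar._

    // The implicit timeout is now typed as PatienceConfiguration.Timeout.
    def waitUntilTrue(condition: => Boolean)(implicit waitTimeout: PatienceConfiguration.Timeout) {
      eventually(waitTimeout, interval(100.millis)) {
        assert(condition)
      }
    }

    // The timeout(...) factory imported from Eventually builds the new type:
    waitUntilTrue(System.currentTimeMillis() > 0)(timeout(1.second))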

core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala

Lines changed: 2 additions & 2 deletions
@@ -23,11 +23,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {

   // This test suite should run all tests in ShuffleSuite with Netty shuffle mode.

-  override def beforeAll(configMap: Map[String, Any]) {
+  override def beforeAll() {
     System.setProperty("spark.shuffle.use.netty", "true")
   }

-  override def afterAll(configMap: Map[String, Any]) {
+  override def afterAll() {
     System.setProperty("spark.shuffle.use.netty", "false")
   }
 }
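
BeforeAndAfterAll in ScalaTest 2.x no longer accepts the old Map[String, Any] overloads, so suites override the parameterless beforeAll()/afterAll() instead. A minimal sketch with a hypothetical suite and property name, assuming nothing beyond ScalaTest 2.1.5:

    import org.scalatest.{BeforeAndAfterAll, FunSuite}

    // Hypothetical suite: setup and teardown run once around all tests.
    class NettyFlagSuite extends FunSuite with BeforeAndAfterAll {
      override def beforeAll() {
        System.setProperty("example.use.netty", "true")
      }

      override def afterAll() {
        System.clearProperty("example.use.netty")
      }

      test("flag is set while the suite runs") {
        assert(System.getProperty("example.use.netty") == "true")
      }
    }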

core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala

Lines changed: 3 additions & 2 deletions
@@ -266,8 +266,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {

     // we can optionally shuffle to keep the upstream parallel
     val coalesced5 = data.coalesce(1, shuffle = true)
-    assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
-      null)
+    val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd.
+      asInstanceOf[ShuffledRDD[_, _, _]] != null
+    assert(isEquals)

     // when shuffling, we can increase the number of partitions
     val coalesced6 = data.coalesce(20, shuffle = true)
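
Several suites in this commit (here, and in TimeStampedHashMapSuite and ReplSuite below) hoist a complex expression into a local val before asserting it. ScalaTest 2.x compiles assert(...) as a macro that inspects its argument expression, and binding the expression first appears to keep that expansion simple for multi-line arguments. The pattern in a minimal, purely illustrative form:

    // Instead of feeding a long multi-line expression straight to the
    // assert macro, bind it to a descriptively named val and assert that:
    val firstDoubledElementIsPositive = Seq(1, 2, 3)
      .map(_ * 2)
      .headOption
      .exists(_ > 0)
    assert(firstDoubledElementIsPositive)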

core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -23,7 +23,7 @@ import scala.language.reflectiveCalls

 import akka.actor._
 import akka.testkit.{ImplicitSender, TestKit, TestActorRef}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.{BeforeAndAfter, FunSuiteLike}

 import org.apache.spark._
 import org.apache.spark.rdd.RDD
@@ -37,7 +37,7 @@ class BuggyDAGEventProcessActor extends Actor {
   }
 }

-class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuite
+class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuiteLike
   with ImplicitSender with BeforeAndAfter with LocalSparkContext {

   val conf = new SparkConf
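
ScalaTest 2.x turned the style traits such as FunSuite into classes (keeping FunSuiteLike as the mixable trait), so a suite that already extends another class, here akka.testkit.TestKit, must switch to the trait. The same change drives the ParquetQuerySuite edit further down. A minimal sketch with a hypothetical suite name:

    import akka.actor.ActorSystem
    import akka.testkit.TestKit
    import org.scalatest.FunSuiteLike

    // FunSuite is a class in ScalaTest 2.x; since TestKit already occupies
    // the superclass slot, the FunSuiteLike trait is mixed in instead.
    class ActorBackedSuite extends TestKit(ActorSystem("ActorBackedSuite")) with FunSuiteLike {
      test("actor system is up") {
        assert(system.name == "ActorBackedSuite")
      }
    }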

core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala

Lines changed: 2 additions & 1 deletion
@@ -105,7 +105,8 @@ class TimeStampedHashMapSuite extends FunSuite {
     map("k1") = strongRef
     map("k2") = "v2"
     map("k3") = "v3"
-    assert(map("k1") === strongRef)
+    val isEquals = map("k1") == strongRef
+    assert(isEquals)

     // clear strong reference to "k1"
     strongRef = null

pom.xml

Lines changed: 11 additions & 4 deletions
@@ -459,25 +459,31 @@
       <dependency>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>1.9.1</version>
+        <version>2.1.5</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
+        <artifactId>easymockclassextension</artifactId>
         <version>3.1</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.mockito</groupId>
         <artifactId>mockito-all</artifactId>
-        <version>1.8.5</version>
+        <version>1.9.0</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.scalacheck</groupId>
         <artifactId>scalacheck_${scala.binary.version}</artifactId>
-        <version>1.10.0</version>
+        <version>1.11.3</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>junit</groupId>
+        <artifactId>junit</artifactId>
+        <version>4.10</version>
         <scope>test</scope>
       </dependency>
       <dependency>
@@ -779,6 +785,7 @@
               <arg>-unchecked</arg>
               <arg>-deprecation</arg>
               <arg>-feature</arg>
+              <arg>-language:postfixOps</arg>
             </args>
             <jvmArgs>
               <jvmArg>-Xms1024m</jvmArg>
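
The new -language:postfixOps scalac flag matters because the test code writes ScalaTest SpanSugar durations postfix (100 millis), and Scala 2.10 under -feature flags postfix operators unless the language feature is enabled, either per file or, as here, project-wide. A one-file sketch of the per-file equivalent:

    import scala.language.postfixOps  // per-file equivalent of -language:postfixOps
    import org.scalatest.time.SpanSugar._

    // Postfix notation compiles without a feature warning once enabled:
    val pollInterval = 100 millis
    val hardLimit = 10 seconds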

project/SparkBuild.scala

Lines changed: 11 additions & 11 deletions
@@ -270,16 +270,17 @@ object SparkBuild extends Build {
   */

   libraryDependencies ++= Seq(
-    "io.netty" % "netty-all" % "4.0.17.Final",
-    "org.eclipse.jetty" % "jetty-server" % jettyVersion,
-    "org.eclipse.jetty" % "jetty-util" % jettyVersion,
-    "org.eclipse.jetty" % "jetty-plus" % jettyVersion,
-    "org.eclipse.jetty" % "jetty-security" % jettyVersion,
-    "org.scalatest" %% "scalatest" % "1.9.1" % "test",
-    "org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
-    "com.novocode" % "junit-interface" % "0.10" % "test",
-    "org.easymock" % "easymock" % "3.1" % "test",
-    "org.mockito" % "mockito-all" % "1.8.5" % "test"
+    "io.netty" % "netty-all" % "4.0.17.Final",
+    "org.eclipse.jetty" % "jetty-server" % jettyVersion,
+    "org.eclipse.jetty" % "jetty-util" % jettyVersion,
+    "org.eclipse.jetty" % "jetty-plus" % jettyVersion,
+    "org.eclipse.jetty" % "jetty-security" % jettyVersion,
+    "org.scalatest" %% "scalatest" % "2.1.5" % "test",
+    "org.scalacheck" %% "scalacheck" % "1.11.3" % "test",
+    "com.novocode" % "junit-interface" % "0.10" % "test",
+    "org.easymock" % "easymockclassextension" % "3.1" % "test",
+    "org.mockito" % "mockito-all" % "1.9.0" % "test",
+    "junit" % "junit" % "4.10" % "test"
   ),

   testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
@@ -478,7 +479,6 @@ object SparkBuild extends Build {
     // this non-deterministically. TODO: FIX THIS.
     parallelExecution in Test := false,
     libraryDependencies ++= Seq(
-      "org.scalatest" %% "scalatest" % "1.9.1" % "test",
       "com.typesafe" %% "scalalogging-slf4j" % "1.0.1"
     )
   )

repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala

Lines changed: 4 additions & 2 deletions
@@ -67,12 +67,14 @@ class ReplSuite extends FunSuite {
   }

   def assertContains(message: String, output: String) {
-    assert(output.contains(message),
+    val isContain = output.contains(message)
+    assert(isContain,
       "Interpreter output did not contain '" + message + "':\n" + output)
   }

   def assertDoesNotContain(message: String, output: String) {
-    assert(!output.contains(message),
+    val isContain = output.contains(message)
+    assert(!isContain,
       "Interpreter output contained '" + message + "':\n" + output)
   }

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala

Lines changed: 2 additions & 2 deletions
@@ -17,7 +17,7 @@

 package org.apache.spark.sql.parquet

-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.{BeforeAndAfterAll, FunSuiteLike}

 import org.apache.hadoop.fs.{Path, FileSystem}
 import org.apache.hadoop.mapreduce.Job
@@ -56,7 +56,7 @@ case class OptionalReflectData(
     doubleField: Option[Double],
     booleanField: Option[Boolean])

-class ParquetQuerySuite extends QueryTest with FunSuite with BeforeAndAfterAll {
+class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
   import TestData._
   TestData // Load test data tables.
