Skip to content

Commit 056deef

Browse files
witgo authored and markhamstra committed
[SPARK-1841]: update scalatest to version 2.1.5
Author: witgo <[email protected]> Closes apache#713 from witgo/scalatest and squashes the following commits: b627a6a [witgo] merge master 51fb3d6 [witgo] merge master 3771474 [witgo] fix RDDSuite 996d6f9 [witgo] fix TimeStampedWeakValueHashMap test 9dfa4e7 [witgo] merge bug 1479b22 [witgo] merge master 29b9194 [witgo] fix code style 022a7a2 [witgo] fix test dependency a52c0fa [witgo] fix test dependency cd8f59d [witgo] Merge branch 'master' of https://github.com/apache/spark into scalatest 046540d [witgo] fix RDDSuite.scala 2c543b9 [witgo] fix ReplSuite.scala c458928 [witgo] update scalatest to version 2.1.5 Conflicts: core/pom.xml core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala pom.xml project/SparkBuild.scala sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
1 parent 8a6739b commit 056deef

File tree

8 files changed

+35
-23
lines changed

8 files changed

+35
-23
lines changed

core/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -174,7 +174,7 @@
174174
</dependency>
175175
<dependency>
176176
<groupId>org.easymock</groupId>
177-
<artifactId>easymock</artifactId>
177+
<artifactId>easymockclassextension</artifactId>
178178
<scope>test</scope>
179179
</dependency>
180180
<dependency>

core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,11 +24,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {
2424

2525
// This test suite should run all tests in ShuffleSuite with Netty shuffle mode.
2626

27-
override def beforeAll(configMap: Map[String, Any]) {
27+
override def beforeAll() {
2828
System.setProperty("spark.shuffle.use.netty", "true")
2929
}
3030

31-
override def afterAll(configMap: Map[String, Any]) {
31+
override def afterAll() {
3232
System.setProperty("spark.shuffle.use.netty", "false")
3333
}
3434
}

core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -233,8 +233,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {
233233

234234
// we can optionally shuffle to keep the upstream parallel
235235
val coalesced5 = data.coalesce(1, shuffle = true)
236-
assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
237-
null)
236+
val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd.
237+
asInstanceOf[ShuffledRDD[_, _, _]] != null
238+
assert(isEquals)
238239

239240
// when shuffling, we can increase the number of partitions
240241
val coalesced6 = data.coalesce(20, shuffle = true)

core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ import org.apache.spark.{FetchFailed, Success, TaskEndReason}
3131
import org.apache.spark.rdd.RDD
3232
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
3333
import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
34-
import org.scalatest.{BeforeAndAfter, FunSuite}
34+
import org.scalatest.{BeforeAndAfter, FunSuiteLike}
3535

3636
/**
3737
* Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
@@ -45,7 +45,7 @@ import org.scalatest.{BeforeAndAfter, FunSuite}
4545
* DAGScheduler#submitWaitingStages (via test utility functions like runEvent or respondToTaskSet)
4646
* and capturing the resulting TaskSets from the mock TaskScheduler.
4747
*/
48-
class DAGSchedulerSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
48+
class DAGSchedulerSuite extends FunSuiteLike with BeforeAndAfter with LocalSparkContext {
4949
val conf = new SparkConf
5050
/** Set of TaskSets the DAGScheduler has requested executed. */
5151
val taskSets = scala.collection.mutable.Buffer[TaskSet]()

pom.xml

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -378,7 +378,7 @@
378378
<dependency>
379379
<groupId>org.scalatest</groupId>
380380
<artifactId>scalatest_${scala.binary.version}</artifactId>
381-
<version>1.9.1</version>
381+
<version>2.1.5</version>
382382
<scope>test</scope>
383383
</dependency>
384384
<dependency>
@@ -394,20 +394,26 @@
394394
</dependency>
395395
<dependency>
396396
<groupId>org.easymock</groupId>
397-
<artifactId>easymock</artifactId>
397+
<artifactId>easymockclassextension</artifactId>
398398
<version>3.1</version>
399399
<scope>test</scope>
400400
</dependency>
401401
<dependency>
402402
<groupId>org.mockito</groupId>
403403
<artifactId>mockito-all</artifactId>
404-
<version>1.8.5</version>
404+
<version>1.9.0</version>
405405
<scope>test</scope>
406406
</dependency>
407407
<dependency>
408408
<groupId>org.scalacheck</groupId>
409409
<artifactId>scalacheck_${scala.binary.version}</artifactId>
410-
<version>1.10.0</version>
410+
<version>1.11.3</version>
411+
<scope>test</scope>
412+
</dependency>
413+
<dependency>
414+
<groupId>junit</groupId>
415+
<artifactId>junit</artifactId>
416+
<version>4.10</version>
411417
<scope>test</scope>
412418
</dependency>
413419
<dependency>
@@ -614,6 +620,8 @@
614620
<args>
615621
<arg>-unchecked</arg>
616622
<arg>-deprecation</arg>
623+
<arg>-feature</arg>
624+
<arg>-language:postfixOps</arg>
617625
</args>
618626
<jvmArgs>
619627
<jvmArg>-Xms64m</jvmArg>

project/SparkBuild.scala

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -224,12 +224,13 @@ object SparkBuild extends Build {
224224
"org.eclipse.jetty" % "jetty-server" % "7.6.8.v20121106",
225225
/** Workaround for SPARK-959. Dependency used by org.eclipse.jetty. Fixed in ivy 2.3.0. */
226226
"org.eclipse.jetty.orbit" % "javax.servlet" % "2.5.0.v201103041518" artifacts Artifact("javax.servlet", "jar", "jar"),
227-
"org.scalatest" %% "scalatest" % "1.9.1" % "test",
228-
"org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
227+
"org.scalatest" %% "scalatest" % "2.1.5" % "test",
228+
"org.scalacheck" %% "scalacheck" % "1.11.3" % "test",
229229
"com.novocode" % "junit-interface" % "0.10" % "test",
230-
"org.easymock" % "easymock" % "3.1" % "test",
231-
"org.mockito" % "mockito-all" % "1.8.5" % "test",
230+
"org.easymock" % "easymockclassextension" % "3.1" % "test",
231+
"org.mockito" % "mockito-all" % "1.9.0" % "test",
232232
"commons-io" % "commons-io" % "2.4" % "test",
233+
"junit" % "junit" % "4.10" % "test"
233234
),
234235

235236
testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),

repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -56,12 +56,14 @@ class ReplSuite extends FunSuite {
5656
}
5757

5858
def assertContains(message: String, output: String) {
59-
assert(output.contains(message),
59+
val isContain = output.contains(message)
60+
assert(isContain,
6061
"Interpreter output did not contain '" + message + "':\n" + output)
6162
}
6263

6364
def assertDoesNotContain(message: String, output: String) {
64-
assert(!output.contains(message),
65+
val isContain = output.contains(message)
66+
assert(!isContain,
6567
"Interpreter output contained '" + message + "':\n" + output)
6668
}
6769

streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -90,9 +90,9 @@ class BasicOperationsSuite extends TestSuiteBase {
9090
assert(second.size === 5)
9191
assert(third.size === 5)
9292

93-
assert(first.flatten.toSet === (1 to 100).toSet)
94-
assert(second.flatten.toSet === (101 to 200).toSet)
95-
assert(third.flatten.toSet === (201 to 300).toSet)
93+
assert(first.flatten.toSet.equals((1 to 100).toSet))
94+
assert(second.flatten.toSet.equals((101 to 200).toSet))
95+
assert(third.flatten.toSet.equals((201 to 300).toSet))
9696
}
9797

9898
test("repartition (fewer partitions)") {
@@ -109,9 +109,9 @@ class BasicOperationsSuite extends TestSuiteBase {
109109
assert(second.size === 2)
110110
assert(third.size === 2)
111111

112-
assert(first.flatten.toSet === (1 to 100).toSet)
113-
assert(second.flatten.toSet === (101 to 200).toSet)
114-
assert(third.flatten.toSet === (201 to 300).toSet)
112+
assert(first.flatten.toSet.equals((1 to 100).toSet))
113+
assert(second.flatten.toSet.equals((101 to 200).toSet))
114+
assert(third.flatten.toSet.equals((201 to 300).toSet))
115115
}
116116

117117
test("groupByKey") {

0 commit comments

Comments (0)