@@ -33,34 +33,29 @@ class FlatmapIteratorSuite extends FunSuite with LocalSparkContext {
    * info from the serializer, and allow old objects to be GC'd
    */
   test("Flatmap Iterator to Disk") {
-    val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
-      .setAppName("iterator_to_disk_test")
+    val sconf = new SparkConf().setMaster("local").setAppName("iterator_to_disk_test")
     sc = new SparkContext(sconf)
     val expand_size = 100
     val data = sc.parallelize((1 to 5).toSeq).
       flatMap( x => Stream.range(0, expand_size))
     var persisted = data.persist(StorageLevel.DISK_ONLY)
-    println(persisted.count())
     assert(persisted.count()===500)
     assert(persisted.filter(_==1).count()===5)
   }
 
   test("Flatmap Iterator to Memory") {
-    val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
-      .setAppName("iterator_to_disk_test")
+    val sconf = new SparkConf().setMaster("local").setAppName("iterator_to_disk_test")
     sc = new SparkContext(sconf)
     val expand_size = 100
     val data = sc.parallelize((1 to 5).toSeq).
       flatMap(x => Stream.range(0, expand_size))
     var persisted = data.persist(StorageLevel.MEMORY_ONLY)
-    println(persisted.count())
     assert(persisted.count()===500)
     assert(persisted.filter(_==1).count()===5)
   }
 
   test("Serializer Reset") {
-    val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
-      .setAppName("serializer_reset_test")
+    val sconf = new SparkConf().setMaster("local").setAppName("serializer_reset_test")
       .set("spark.serializer.objectStreamReset", "10")
     sc = new SparkContext(sconf)
     val expand_size = 500
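
Illustrative sketch (not part of the diff above): the spark.serializer.objectStreamReset setting exercised by the "Serializer Reset" test tells the default Java serializer to reset its ObjectOutputStream after every N written objects, flushing the cached back-references so already-serialized objects can be garbage collected. A minimal standalone version of that setup, assuming the standard SparkConf/SparkContext API; the object name, app name, and sizes below are arbitrary:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.storage.StorageLevel

object SerializerResetSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("serializer_reset_sketch")           // hypothetical app name
      .set("spark.serializer.objectStreamReset", "10") // reset the stream every 10 objects
    val sc = new SparkContext(conf)
    try {
      // DISK_ONLY persistence feeds the flatMap iterator straight to the serializer
      // instead of unrolling it into memory first.
      val data = sc.parallelize(1 to 5).flatMap(_ => Stream.range(0, 500))
      val persisted = data.persist(StorageLevel.DISK_ONLY)
      println(persisted.count()) // 5 * 500 = 2500
    } finally {
      sc.stop()
    }
  }
}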