@@ -88,7 +88,7 @@ class TaskSetManagerSuite extends FunSuite with LocalSparkContext with Logging {
   test("TaskSet with no preferences") {
     sc = new SparkContext("local", "test")
     val sched = new FakeTaskScheduler(sc, ("exec1", "host1"))
-    val taskSet = createTaskSet(1)
+    val taskSet = FakeTask.createTaskSet(1)
     val manager = new TaskSetManager(sched, taskSet, MAX_TASK_FAILURES)
 
     // Offer a host with no CPUs
@@ -114,7 +114,7 @@ class TaskSetManagerSuite extends FunSuite with LocalSparkContext with Logging {
   test("multiple offers with no preferences") {
     sc = new SparkContext("local", "test")
     val sched = new FakeTaskScheduler(sc, ("exec1", "host1"))
-    val taskSet = createTaskSet(3)
+    val taskSet = FakeTask.createTaskSet(3)
     val manager = new TaskSetManager(sched, taskSet, MAX_TASK_FAILURES)
 
     // First three offers should all find tasks
@@ -145,7 +145,7 @@ class TaskSetManagerSuite extends FunSuite with LocalSparkContext with Logging {
   test("basic delay scheduling") {
     sc = new SparkContext("local", "test")
     val sched = new FakeTaskScheduler(sc, ("exec1", "host1"), ("exec2", "host2"))
-    val taskSet = createTaskSet(4,
+    val taskSet = FakeTask.createTaskSet(4,
       Seq(TaskLocation("host1", "exec1")),
       Seq(TaskLocation("host2", "exec2")),
       Seq(TaskLocation("host1"), TaskLocation("host2", "exec2")),
@@ -190,7 +190,7 @@ class TaskSetManagerSuite extends FunSuite with LocalSparkContext with Logging {
     sc = new SparkContext("local", "test")
     val sched = new FakeTaskScheduler(sc,
       ("exec1", "host1"), ("exec2", "host2"), ("exec3", "host3"))
-    val taskSet = createTaskSet(5,
+    val taskSet = FakeTask.createTaskSet(5,
       Seq(TaskLocation("host1")),
       Seq(TaskLocation("host2")),
       Seq(TaskLocation("host2")),
@@ -229,7 +229,7 @@ class TaskSetManagerSuite extends FunSuite with LocalSparkContext with Logging {
   test("delay scheduling with failed hosts") {
     sc = new SparkContext("local", "test")
     val sched = new FakeTaskScheduler(sc, ("exec1", "host1"), ("exec2", "host2"))
-    val taskSet = createTaskSet(3,
+    val taskSet = FakeTask.createTaskSet(3,
       Seq(TaskLocation("host1")),
       Seq(TaskLocation("host2")),
       Seq(TaskLocation("host3"))
@@ -261,7 +261,7 @@ class TaskSetManagerSuite extends FunSuite with LocalSparkContext with Logging {
   test("task result lost") {
     sc = new SparkContext("local", "test")
     val sched = new FakeTaskScheduler(sc, ("exec1", "host1"))
-    val taskSet = createTaskSet(1)
+    val taskSet = FakeTask.createTaskSet(1)
     val clock = new FakeClock
     val manager = new TaskSetManager(sched, taskSet, MAX_TASK_FAILURES, clock)
 
@@ -278,7 +278,7 @@ class TaskSetManagerSuite extends FunSuite with LocalSparkContext with Logging {
   test("repeated failures lead to task set abortion") {
     sc = new SparkContext("local", "test")
    val sched = new FakeTaskScheduler(sc, ("exec1", "host1"))
-    val taskSet = createTaskSet(1)
+    val taskSet = FakeTask.createTaskSet(1)
     val clock = new FakeClock
     val manager = new TaskSetManager(sched, taskSet, MAX_TASK_FAILURES, clock)
 
@@ -298,21 +298,6 @@ class TaskSetManagerSuite extends FunSuite with LocalSparkContext with Logging {
     }
   }
 
-
-  /**
-   * Utility method to create a TaskSet, potentially setting a particular sequence of preferred
-   * locations for each task (given as varargs) if this sequence is not empty.
-   */
-  def createTaskSet(numTasks: Int, prefLocs: Seq[TaskLocation]*): TaskSet = {
-    if (prefLocs.size != 0 && prefLocs.size != numTasks) {
-      throw new IllegalArgumentException("Wrong number of task locations")
-    }
-    val tasks = Array.tabulate[Task[_]](numTasks) { i =>
-      new FakeTask(i, if (prefLocs.size != 0) prefLocs(i) else Nil)
-    }
-    new TaskSet(tasks, 0, 0, 0, null)
-  }
-
   def createTaskResult(id: Int): DirectTaskResult[Int] = {
     val valueSer = SparkEnv.get.serializer.newInstance()
     new DirectTaskResult[Int](valueSer.serialize(id), mutable.Map.empty, new TaskMetrics)
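
Every call site above now uses FakeTask.createTaskSet, so the helper deleted in the last hunk was presumably moved onto a companion object for FakeTask. A minimal sketch of what that companion object would look like, assuming the method body was carried over unchanged and that it lives in the same package as TaskSet, TaskLocation, and the suite above (so no extra imports are needed):

object FakeTask {
  /**
   * Utility method to create a TaskSet, potentially setting a particular sequence of preferred
   * locations for each task (given as varargs) if this sequence is not empty.
   */
  def createTaskSet(numTasks: Int, prefLocs: Seq[TaskLocation]*): TaskSet = {
    // Callers either pass no preferred locations at all, or exactly one Seq per task.
    if (prefLocs.size != 0 && prefLocs.size != numTasks) {
      throw new IllegalArgumentException("Wrong number of task locations")
    }
    // Build numTasks fake tasks, attaching the i-th preference list when one was given.
    val tasks = Array.tabulate[Task[_]](numTasks) { i =>
      new FakeTask(i, if (prefLocs.size != 0) prefLocs(i) else Nil)
    }
    // Stage, attempt, and priority fields are zeroed; no Properties are attached.
    new TaskSet(tasks, 0, 0, 0, null)
  }
}

Hosting the factory on FakeTask's companion object lets other scheduler test suites share a single copy of the helper rather than each defining its own.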