@@ -26,6 +26,7 @@ import org.apache.spark.scheduler._
 import org.apache.spark.util.Utils

 class JobProgressListenerSuite extends FunSuite with LocalSparkContext with Matchers {
+
   test("test LRU eviction of stages") {
     val conf = new SparkConf()
     conf.set("spark.ui.retainedStages", 5.toString)
@@ -66,7 +67,7 @@ class JobProgressListenerSuite extends FunSuite with LocalSparkContext with Matc
     taskMetrics.updateShuffleReadMetrics(shuffleReadMetrics)
     var taskInfo = new TaskInfo(1234L, 0, 1, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
     taskInfo.finishTime = 1
-    var task = new ShuffleMapTask(0, null, null, 0, null)
+    var task = new ShuffleMapTask(0)
     val taskType = Utils.getFormattedClassName(task)
     listener.onTaskEnd(SparkListenerTaskEnd(task.stageId, taskType, Success, taskInfo, taskMetrics))
     assert(listener.stageIdToData.getOrElse(0, fail()).executorSummary.getOrElse("exe-1", fail())
@@ -76,22 +77,22 @@ class JobProgressListenerSuite extends FunSuite with LocalSparkContext with Matc
     taskInfo =
       new TaskInfo(1234L, 0, 1, 1000L, "exe-unknown", "host1", TaskLocality.NODE_LOCAL, true)
     taskInfo.finishTime = 1
-    task = new ShuffleMapTask(0, null, null, 0, null)
+    task = new ShuffleMapTask(0)
     listener.onTaskEnd(SparkListenerTaskEnd(task.stageId, taskType, Success, taskInfo, taskMetrics))
     assert(listener.stageIdToData.size === 1)

     // finish this task, should get updated duration
     taskInfo = new TaskInfo(1235L, 0, 1, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
     taskInfo.finishTime = 1
-    task = new ShuffleMapTask(0, null, null, 0, null)
+    task = new ShuffleMapTask(0)
     listener.onTaskEnd(SparkListenerTaskEnd(task.stageId, taskType, Success, taskInfo, taskMetrics))
     assert(listener.stageIdToData.getOrElse(0, fail()).executorSummary.getOrElse("exe-1", fail())
       .shuffleRead === 2000)

     // finish this task, should get updated duration
     taskInfo = new TaskInfo(1236L, 0, 2, 0L, "exe-2", "host1", TaskLocality.NODE_LOCAL, false)
     taskInfo.finishTime = 1
-    task = new ShuffleMapTask(0, null, null, 0, null)
+    task = new ShuffleMapTask(0)
     listener.onTaskEnd(SparkListenerTaskEnd(task.stageId, taskType, Success, taskInfo, taskMetrics))
     assert(listener.stageIdToData.getOrElse(0, fail()).executorSummary.getOrElse("exe-2", fail())
       .shuffleRead === 1000)
@@ -103,7 +104,7 @@ class JobProgressListenerSuite extends FunSuite with LocalSparkContext with Matc
     val metrics = new TaskMetrics()
     val taskInfo = new TaskInfo(1234L, 0, 3, 0L, "exe-1", "host1", TaskLocality.NODE_LOCAL, false)
     taskInfo.finishTime = 1
-    val task = new ShuffleMapTask(0, null, null, 0, null)
+    val task = new ShuffleMapTask(0)
     val taskType = Utils.getFormattedClassName(task)

     // Go through all the failure cases to make sure we are counting them as failures.