@@ -155,23 +155,29 @@ class SQLListenerSuite extends SparkFunSuite with SharedSQLContext with JsonTest
     listener.onOtherEvent(SparkListenerDriverAccumUpdates(0, Seq((999L, 2L))))
     checkAnswer(listener.getExecutionMetrics(0), accumulatorUpdates.mapValues(_ * 2))

-    listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate("", null, Seq(
-      // (task id, stage id, stage attempt, accum updates)
-      (0L, 0, 0, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo)),
-      (1L, 0, 0,
-        createTaskMetrics(accumulatorUpdates.mapValues(_ * 2)).accumulators().map(makeInfo))
-    )))
+    listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate(
+      "",
+      Seq(
+        // (task id, stage id, stage attempt, accum updates)
+        (0L, 0, 0, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo)),
+        (1L, 0, 0,
+          createTaskMetrics(accumulatorUpdates.mapValues(_ * 2)).accumulators().map(makeInfo))),
+      None
+    ))

     checkAnswer(listener.getExecutionMetrics(0), accumulatorUpdates.mapValues(_ * 3))

     // Retrying a stage should reset the metrics
     listener.onStageSubmitted(SparkListenerStageSubmitted(createStageInfo(0, 1)))

-    listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate("", null, Seq(
-      // (task id, stage id, stage attempt, accum updates)
-      (0L, 0, 1, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo)),
-      (1L, 0, 1, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo))
-    )))
+    listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate(
+      "",
+      Seq(
+        // (task id, stage id, stage attempt, accum updates)
+        (0L, 0, 1, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo)),
+        (1L, 0, 1, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo))),
+      None
+    ))

     checkAnswer(listener.getExecutionMetrics(0), accumulatorUpdates.mapValues(_ * 2))

@@ -207,11 +213,14 @@ class SQLListenerSuite extends SparkFunSuite with SharedSQLContext with JsonTest
     // Submit a new stage
     listener.onStageSubmitted(SparkListenerStageSubmitted(createStageInfo(1, 0)))

-    listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate("", null, Seq(
-      // (task id, stage id, stage attempt, accum updates)
-      (0L, 1, 0, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo)),
-      (1L, 1, 0, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo))
-    )))
+    listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate(
+      "",
+      Seq(
+        // (task id, stage id, stage attempt, accum updates)
+        (0L, 1, 0, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo)),
+        (1L, 1, 0, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo))),
+      None
+    ))

     checkAnswer(listener.getExecutionMetrics(0), accumulatorUpdates.mapValues(_ * 7))

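For context, a minimal sketch of the call shape this patch switches the suite to. It assumes the revised SparkListenerExecutorMetricsUpdate takes the executor id, the per-task accumulator updates, and a trailing Option (None in every call here), presumably carrying new executor-level metrics; the exact parameter name and type of that last argument are not visible in this hunk, and older Spark releases define the event with only the first two arguments. `listener`, `accumulatorUpdates`, `createTaskMetrics`, and `makeInfo` are the suite's own fixtures and helpers.

    // Sketch only: assumes the three-argument event shape used by this patch.
    listener.onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate(
      "",   // placeholder executor id, as in the test
      Seq(  // (task id, stage id, stage attempt, accum updates)
        (0L, 0, 0, createTaskMetrics(accumulatorUpdates).accumulators().map(makeInfo))),
      None  // no executor-level metrics payload in this update (assumed meaning)
    ))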