|
18 | 18 | package org.apache.spark.scheduler
|
19 | 19 |
|
20 | 20 | import java.nio.ByteBuffer
|
| 21 | +import java.util.concurrent.RejectedExecutionException |
21 | 22 |
|
22 | 23 | import scala.language.existentials
|
23 | 24 | import scala.util.control.NonFatal
|
@@ -95,25 +96,30 @@ private[spark] class TaskResultGetter(sparkEnv: SparkEnv, scheduler: TaskSchedul
|
/**
 * Deserializes the [[TaskEndReason]] for a failed task on the task-result-getter thread pool
 * and hands the failure off to the scheduler.
 *
 * @param taskSetManager the TaskSetManager that owns the failed task
 * @param tid            the task attempt id of the failed task
 * @param taskState      the terminal state reported for the task
 * @param serializedData serialized TaskEndReason; may be null or empty, in which case
 *                       the failure is reported with UnknownReason
 */
def enqueueFailedTask(taskSetManager: TaskSetManager, tid: Long, taskState: TaskState,
    serializedData: ByteBuffer) {
  var reason : TaskEndReason = UnknownReason
  try {
    getTaskResultExecutor.execute(new Runnable {
      override def run(): Unit = Utils.logUncaughtExceptions {
        try {
          if (serializedData != null && serializedData.limit() > 0) {
            reason = serializer.get().deserialize[TaskEndReason](
              serializedData, Utils.getSparkClassLoader)
          }
        } catch {
          case cnd: ClassNotFoundException =>
            // Log an error but keep going here -- the task failed, so not catastrophic if we can't
            // deserialize the reason.
            val loader = Utils.getContextOrSparkClassLoader
            logError(
              "Could not deserialize TaskEndReason: ClassNotFound with classloader " + loader)
          case NonFatal(ex) =>
            // Same rationale as above: the task already failed, so fall back to UnknownReason
            // rather than letting the deserialization error mask the failure. Log instead of
            // swallowing silently; fatal errors (OOM, etc.) still propagate.
            logError("Could not deserialize TaskEndReason: " + ex)
        }
        scheduler.handleFailedTask(taskSetManager, tid, taskState, reason)
      }
    })
  } catch {
    case e: RejectedExecutionException if sparkEnv.isStopped =>
      // The thread pool rejects new work once it has been shut down, which happens when the
      // SparkEnv is stopping; losing this failure notification is harmless at that point.
  }
}
|
118 | 124 |
|
119 | 125 | def stop() {
|
|
0 commit comments