@@ -19,9 +19,7 @@ package org.apache.spark.sql.execution.adaptive
 import scala.concurrent.{ExecutionContext, Future}
 import scala.concurrent.duration.Duration
-
-import org.apache.spark.MapOutputStatistics
-import org.apache.spark.broadcast
+import org.apache.spark.{MapOutputStatistics, SparkContext, broadcast}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
@@ -54,14 +52,15 @@ abstract class QueryStage extends UnaryExecNode {
    */
   def executeChildStages(): Unit = {
     val executionId = sqlContext.sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
+    val jobDesc = sqlContext.sparkContext.getLocalProperty(SparkContext.SPARK_JOB_DESCRIPTION)
 
     // Handle broadcast stages
     val broadcastQueryStages: Seq[BroadcastQueryStage] = child.collect {
       case bqs: BroadcastQueryStageInput => bqs.childStage
     }
     val broadcastFutures = broadcastQueryStages.map { queryStage =>
       Future {
-        SQLExecution.withExecutionId(sqlContext.sparkContext, executionId) {
+        SQLExecution.withExecutionIdAndJobDesc(sqlContext.sparkContext, executionId, jobDesc) {
           queryStage.prepareBroadcast()
         }
       }(QueryStage.executionContext)
@@ -73,7 +72,7 @@ abstract class QueryStage extends UnaryExecNode {
     }
     val shuffleStageFutures = shuffleQueryStages.map { queryStage =>
       Future {
-        SQLExecution.withExecutionId(sqlContext.sparkContext, executionId) {
+        SQLExecution.withExecutionIdAndJobDesc(sqlContext.sparkContext, executionId, jobDesc) {
           queryStage.execute()
         }
       }(QueryStage.executionContext)
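
Note on the change: the child-stage futures run on QueryStage.executionContext, a separate thread pool, so thread-local SparkContext properties such as the job description set on the caller's thread are not visible there. Capturing jobDesc up front and passing it through the new helper restores it inside each future. The helper's body is not part of these hunks; a minimal sketch of what it is assumed to do, modeled on the existing SQLExecution.withExecutionId, might be:

  // Sketch (assumption, not the PR's actual implementation): set both the
  // execution id and the job description as local properties around `body`,
  // restoring the previous values afterwards.
  def withExecutionIdAndJobDesc[T](
      sc: SparkContext,
      executionId: String,
      jobDesc: String)(body: => T): T = {
    val oldExecutionId = sc.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
    val oldJobDesc = sc.getLocalProperty(SparkContext.SPARK_JOB_DESCRIPTION)
    try {
      sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, executionId)
      sc.setLocalProperty(SparkContext.SPARK_JOB_DESCRIPTION, jobDesc)
      body
    } finally {
      sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, oldExecutionId)
      sc.setLocalProperty(SparkContext.SPARK_JOB_DESCRIPTION, oldJobDesc)
    }
  }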