
Commit 42730b3

Commit message: rebase

1 parent 9f801c8 commit 42730b3

File tree

3 files changed: +322 -215 lines changed


sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanExec.scala

Lines changed: 19 additions & 3 deletions

@@ -33,6 +33,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ReturnAnswer}
 import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
+import org.apache.spark.sql.catalyst.trees.TreeNode
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec._
 import org.apache.spark.sql.execution.adaptive.rule.ReduceNumShufflePartitions
@@ -190,10 +191,25 @@ case class AdaptiveSparkPlanExec(
       verbose: Boolean,
       prefix: String = "",
       addSuffix: Boolean = false,
-      maxFields: Int): Unit = {
-    super.generateTreeString(depth, lastChildren, append, verbose, prefix, addSuffix, maxFields)
+      maxFields: Int,
+      planToOperatorID: mutable.LinkedHashMap[TreeNode[_], Int]): Unit = {
+    super.generateTreeString(depth,
+      lastChildren,
+      append,
+      verbose,
+      prefix,
+      addSuffix,
+      maxFields,
+      planToOperatorID)
     currentPhysicalPlan.generateTreeString(
-      depth + 1, lastChildren :+ true, append, verbose, "", addSuffix = false, maxFields)
+      depth + 1,
+      lastChildren :+ true,
+      append,
+      verbose,
+      "",
+      addSuffix = false,
+      maxFields,
+      planToOperatorID)
   }

   /**

sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/QueryStageExec.scala

Lines changed: 13 additions & 3 deletions

@@ -17,6 +17,7 @@

 package org.apache.spark.sql.execution.adaptive

+import scala.collection.mutable
 import scala.concurrent.Future

 import org.apache.spark.{FutureAction, MapOutputStatistics}
@@ -26,6 +27,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical.Statistics
 import org.apache.spark.sql.catalyst.plans.physical.Partitioning
+import org.apache.spark.sql.catalyst.trees.TreeNode
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.exchange._

@@ -107,10 +109,18 @@ abstract class QueryStageExec extends LeafExecNode {
       verbose: Boolean,
       prefix: String = "",
       addSuffix: Boolean = false,
-      maxFields: Int): Unit = {
-    super.generateTreeString(depth, lastChildren, append, verbose, prefix, addSuffix, maxFields)
+      maxFields: Int,
+      planToOperatorID: mutable.LinkedHashMap[TreeNode[_], Int]): Unit = {
+    super.generateTreeString(depth,
+      lastChildren,
+      append,
+      verbose,
+      prefix,
+      addSuffix,
+      maxFields,
+      planToOperatorID)
     plan.generateTreeString(
-      depth + 1, lastChildren :+ true, append, verbose, "", false, maxFields)
+      depth + 1, lastChildren :+ true, append, verbose, "", false, maxFields, planToOperatorID)
   }
 }
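Note: both files apply the same change: generateTreeString gains a planToOperatorID parameter of type mutable.LinkedHashMap[TreeNode[_], Int] and forwards it through the recursive call on the wrapped plan (currentPhysicalPlan / plan). The standalone Scala sketch below illustrates only that map-threading pattern; ToyNode and TreeStringDemo are hypothetical names, not Spark classes, and the sketch assumes the map is meant to assign stable operator IDs in visit order.

// Minimal sketch of threading an operator-ID map through recursive tree printing.
// ToyNode stands in for TreeNode/SparkPlan; this is not Spark's actual API.
import scala.collection.mutable

case class ToyNode(name: String, children: Seq[ToyNode] = Nil) {

  def generateTreeString(
      depth: Int,
      append: String => Unit,
      prefix: String = "",
      planToOperatorID: mutable.LinkedHashMap[ToyNode, Int]): Unit = {
    // Assign an ID on first visit; LinkedHashMap preserves insertion (visit) order.
    val id = planToOperatorID.getOrElseUpdate(this, planToOperatorID.size + 1)
    append(("  " * depth) + prefix + s"($id) $name\n")
    // Forward the same map to every child, mirroring how the diff passes
    // planToOperatorID down to the wrapped plan's generateTreeString call.
    children.foreach(_.generateTreeString(depth + 1, append, "", planToOperatorID))
  }
}

object TreeStringDemo extends App {
  val plan = ToyNode("Project", Seq(ToyNode("Filter", Seq(ToyNode("Scan")))))
  val ids = mutable.LinkedHashMap.empty[ToyNode, Int]
  plan.generateTreeString(0, s => print(s), planToOperatorID = ids)
  // ids now maps every visited node to its ID, in the order it was printed.
}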
