Commit e7315c6

Fix failing tests
1 parent 34de899 commit e7315c6

2 files changed: 19 additions, 19 deletions


core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 7 additions & 7 deletions
@@ -106,20 +106,20 @@ object SparkSubmit {
 
     val options = List[OptionAssigner](
       new OptionAssigner(appArgs.master, ALL_CLUSTER_MGRS, false, sysProp = "spark.master"),
-      new OptionAssigner(appArgs.driverMemory, YARN, true, clOption = "--master-memory"),
+      new OptionAssigner(appArgs.driverMemory, YARN, true, clOption = "--driver-memory"),
       new OptionAssigner(appArgs.name, YARN, true, clOption = "--name"),
       new OptionAssigner(appArgs.queue, YARN, true, clOption = "--queue"),
       new OptionAssigner(appArgs.queue, YARN, false, sysProp = "spark.yarn.queue"),
-      new OptionAssigner(appArgs.numExecutors, YARN, true, clOption = "--num-workers"),
-      new OptionAssigner(appArgs.numExecutors, YARN, false, sysProp = "spark.worker.instances"),
-      new OptionAssigner(appArgs.executorMemory, YARN, true, clOption = "--worker-memory"),
+      new OptionAssigner(appArgs.numExecutors, YARN, true, clOption = "--num-executors"),
+      new OptionAssigner(appArgs.numExecutors, YARN, false, sysProp = "spark.executor.instances"),
+      new OptionAssigner(appArgs.executorMemory, YARN, true, clOption = "--executor-memory"),
       new OptionAssigner(appArgs.executorMemory, STANDALONE | MESOS | YARN, false,
         sysProp = "spark.executor.memory"),
       new OptionAssigner(appArgs.driverMemory, STANDALONE, true, clOption = "--memory"),
-      new OptionAssigner(appArgs.executorCores, YARN, true, clOption = "--worker-cores"),
-      new OptionAssigner(appArgs.executorCores, YARN, false, sysProp = "spark.executor.cores"),
       new OptionAssigner(appArgs.driverCores, STANDALONE, true, clOption = "--cores"),
-      new OptionAssigner(appArgs.totalExecutorCores, STANDALONE | MESOS, true,
+      new OptionAssigner(appArgs.executorCores, YARN, true, clOption = "--executor-cores"),
+      new OptionAssigner(appArgs.executorCores, YARN, false, sysProp = "spark.executor.cores"),
+      new OptionAssigner(appArgs.totalExecutorCores, STANDALONE | MESOS, false,
         sysProp = "spark.cores.max"),
       new OptionAssigner(appArgs.files, YARN, false, sysProp = "spark.yarn.dist.files"),
       new OptionAssigner(appArgs.files, YARN, true, clOption = "--files"),
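
For context, the OptionAssigner call sites above imply a small helper that routes each user-supplied value either to a command-line option of the child process or to a system property, keyed by cluster manager and deploy mode. A minimal sketch, inferred only from these call sites (field names other than clOption and sysProp are assumptions, not the actual Spark definition):

  // Sketch only: inferred from the call sites above, not copied from Spark.
  private class OptionAssigner(
      val value: String,            // e.g. appArgs.executorMemory
      val clusterManager: Int,      // bitmask such as YARN or STANDALONE | MESOS
      val deployOnCluster: Boolean, // applies only when deploying on the cluster
      val clOption: String = null,  // child CLI flag, e.g. "--executor-memory"
      val sysProp: String = null)   // system property, e.g. "spark.executor.memory"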

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 12 additions & 12 deletions
@@ -40,14 +40,14 @@ class SparkSubmitSuite extends FunSuite with ShouldMatchers {
     childArgsStr should include ("--jar thejar.jar")
     childArgsStr should include ("--class org.SomeClass")
     childArgsStr should include ("--addJars one.jar,two.jar,three.jar")
-    childArgsStr should include ("--worker-memory 5g")
-    childArgsStr should include ("--master-memory 4g")
-    childArgsStr should include ("--worker-cores 5")
+    childArgsStr should include ("--executor-memory 5g")
+    childArgsStr should include ("--driver-memory 4g")
+    childArgsStr should include ("--executor-cores 5")
     childArgsStr should include ("--args arg1 --args arg2")
     childArgsStr should include ("--queue thequeue")
     childArgsStr should include ("--files file1.txt,file2.txt")
     childArgsStr should include ("--archives archive1.txt,archive2.txt")
-    childArgsStr should include ("--num-workers 6")
+    childArgsStr should include ("--num-executors 6")
     mainClass should be ("org.apache.spark.deploy.yarn.Client")
     classpath should have length (0)
     sysProps should have size (0)
@@ -69,22 +69,22 @@ class SparkSubmitSuite extends FunSuite with ShouldMatchers {
     classpath should contain ("two.jar")
     classpath should contain ("three.jar")
     sysProps("spark.executor.memory") should be ("5g")
-    sysProps("spark.cores.max") should be ("5")
+    sysProps("spark.executor.cores") should be ("5")
     sysProps("spark.yarn.queue") should be ("thequeue")
     sysProps("spark.yarn.dist.files") should be ("file1.txt,file2.txt")
     sysProps("spark.yarn.dist.archives") should be ("archive1.txt,archive2.txt")
-    sysProps("spark.worker.instances") should be ("6")
+    sysProps("spark.executor.instances") should be ("6")
   }
 
   test("handles standalone cluster mode") {
     val clArgs = Array("thejar.jar", "--deploy-mode", "cluster",
-      "--master", "spark://h:p", "--executor-memory", "5g", "--executor-cores", "5",
-      "--class", "org.SomeClass", "--arg", "arg1", "--arg", "arg2", "--supervise",
-      "--driver-memory", "4g")
+      "--master", "spark://h:p", "--class", "org.SomeClass", "--arg", "arg1", "--arg", "arg2",
+      "--supervise", "--driver-memory", "4g", "--driver-cores", "5")
     val appArgs = new SparkSubmitArguments(clArgs)
     val (childArgs, classpath, sysProps, mainClass) = createLaunchEnv(appArgs)
     val childArgsStr = childArgs.mkString(" ")
-    childArgsStr.startsWith("--memory 5g --cores 5 --supervise") should be (true)
+    print("child args: " + childArgsStr)
+    childArgsStr.startsWith("--memory 4g --cores 5 --supervise") should be (true)
     childArgsStr should include ("launch spark://h:p thejar.jar org.SomeClass arg1 arg2")
     mainClass should be ("org.apache.spark.deploy.Client")
     classpath should have length (0)
@@ -93,7 +93,7 @@ class SparkSubmitSuite extends FunSuite with ShouldMatchers {
 
   test("handles standalone client mode") {
     val clArgs = Array("thejar.jar", "--deploy-mode", "client",
-      "--master", "spark://h:p", "--executor-memory", "5g", "--executor-cores", "5",
+      "--master", "spark://h:p", "--executor-memory", "5g", "--total-executor-cores", "5",
       "--class", "org.SomeClass", "--arg", "arg1", "--arg", "arg2",
       "--driver-memory", "4g")
     val appArgs = new SparkSubmitArguments(clArgs)
@@ -107,7 +107,7 @@ class SparkSubmitSuite extends FunSuite with ShouldMatchers {
 
   test("handles mesos client mode") {
     val clArgs = Array("thejar.jar", "--deploy-mode", "client",
-      "--master", "mesos://h:p", "--executor-memory", "5g", "--executor-cores", "5",
+      "--master", "mesos://h:p", "--executor-memory", "5g", "--total-executor-cores", "5",
       "--class", "org.SomeClass", "--arg", "arg1", "--arg", "arg2",
       "--driver-memory", "4g")
     val appArgs = new SparkSubmitArguments(clArgs)
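
The renamed YARN flags are exercised end to end the same way the suite does it. A minimal usage sketch, modeled loosely on the YARN cluster-mode test above (the exact argument list is an assumption; createLaunchEnv is assumed to be in scope as it is in the suite):

  // Sketch only: loosely modeled on the YARN test; the argument set is an assumption.
  val clArgs = Array("thejar.jar", "--deploy-mode", "cluster", "--master", "yarn",
    "--class", "org.SomeClass",
    "--executor-memory", "5g", "--executor-cores", "5", "--num-executors", "6",
    "--driver-memory", "4g")
  val appArgs = new SparkSubmitArguments(clArgs)
  val (childArgs, _, _, mainClass) = createLaunchEnv(appArgs)
  // childArgs now carries "--executor-memory 5g", "--executor-cores 5", and
  // "--num-executors 6" in place of the old --worker-* spellings, and
  // mainClass should be "org.apache.spark.deploy.yarn.Client".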
