Skip to content

Commit 7db82a1

Browse files
committed
Fixed spark-submit application options handling logic
Any option in the application's argument list whose name matches one recognized by SparkSubmitArguments (e.g., --help) is consumed by SparkSubmit instead of being passed through to the application.
1 parent 9cc0f06 commit 7db82a1

File tree

2 files changed

+16
-26
lines changed

2 files changed

+16
-26
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 14 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -204,8 +204,9 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
204204

205205
/** Fill in values by parsing user options. */
206206
private def parseOpts(opts: Seq[String]): Unit = {
207+
val EQ_SEPARATED_OPT = """(--[^=]+)=(.+)""".r
208+
207209
// Delineates parsing of Spark options from parsing of user options.
208-
var inSparkOpts = true
209210
parse(opts)
210211

211212
def parse(opts: Seq[String]): Unit = opts match {
@@ -306,33 +307,21 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
306307
verbose = true
307308
parse(tail)
308309

310+
case EQ_SEPARATED_OPT(opt, value) :: tail =>
311+
// convert --foo=bar to --foo bar
312+
parse(opt :: value :: tail)
313+
314+
case value :: tail if value.startsWith("-") =>
315+
SparkSubmit.printErrorAndExit(s"Unrecognized option '$value'.")
316+
309317
case value :: tail =>
310-
if (inSparkOpts) {
311-
value match {
312-
// convert --foo=bar to --foo bar
313-
case v if v.startsWith("--") && v.contains("=") && v.split("=").size == 2 =>
314-
val parts = v.split("=")
315-
parse(Seq(parts(0), parts(1)) ++ tail)
316-
case v if v.startsWith("-") =>
317-
val errMessage = s"Unrecognized option '$value'."
318-
SparkSubmit.printErrorAndExit(errMessage)
319-
case v =>
320-
primaryResource =
321-
if (!SparkSubmit.isShell(v) && !SparkSubmit.isInternal(v)) {
322-
Utils.resolveURI(v).toString
323-
} else {
324-
v
325-
}
326-
inSparkOpts = false
327-
isPython = SparkSubmit.isPython(v)
328-
parse(tail)
329-
}
318+
primaryResource = if (!SparkSubmit.isShell(value) && !SparkSubmit.isInternal(value)) {
319+
Utils.resolveURI(value).toString
330320
} else {
331-
if (!value.isEmpty) {
332-
childArgs += value
333-
}
334-
parse(tail)
321+
value
335322
}
323+
isPython = SparkSubmit.isPython(value)
324+
childArgs ++= tail.filter(_.nonEmpty)
336325

337326
case Nil =>
338327
}

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -101,9 +101,10 @@ class SparkSubmitSuite extends FunSuite with Matchers {
101101
"--class", "Foo",
102102
"userjar.jar",
103103
"some",
104+
"--class", "Bar",
104105
"--weird", "args")
105106
val appArgs = new SparkSubmitArguments(clArgs)
106-
appArgs.childArgs should be (Seq("some", "--weird", "args"))
107+
appArgs.childArgs should be (Seq("some", "--class", "Bar", "--weird", "args"))
107108
}
108109

109110
test("handles YARN cluster mode") {

0 commit comments

Comments
 (0)