@@ -204,8 +204,9 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
 
   /** Fill in values by parsing user options. */
   private def parseOpts(opts: Seq[String]): Unit = {
+    val EQ_SEPARATED_OPT = """(--[^=]+)=(.+)""".r
+
     // Delineates parsing of Spark options from parsing of user options.
-    var inSparkOpts = true
     parse(opts)
 
     def parse(opts: Seq[String]): Unit = opts match {
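
For reference, a small self-contained sketch of what the new `EQ_SEPARATED_OPT` pattern captures (the object name and the sample token below are illustrative, not from the patch; only the regex mirrors the one added above): the first capture group is the `--name` up to the first `=`, the second is everything after it, so an equals-separated token can be re-fed to `parse` as two space-separated tokens. With this change, `--master=local[4]` and `--master local[4]` should take the same code path.

```scala
// Standalone illustration, not part of the patch: object and sample argument
// are hypothetical; only the regex mirrors EQ_SEPARATED_OPT.
object EqSeparatedOptDemo {
  private val EQ_SEPARATED_OPT = """(--[^=]+)=(.+)""".r

  def main(args: Array[String]): Unit = {
    "--master=local[4]" match {
      // The two capture groups split the token at the first '='.
      case EQ_SEPARATED_OPT(opt, value) => println(s"$opt $value")  // prints: --master local[4]
      case other => println(s"not a --key=value token: $other")
    }
  }
}
```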
@@ -306,33 +307,21 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
         verbose = true
         parse(tail)
 
+      case EQ_SEPARATED_OPT(opt, value) :: tail =>
+        // convert --foo=bar to --foo bar
+        parse(opt :: value :: tail)
+
+      case value :: tail if value.startsWith("-") =>
+        SparkSubmit.printErrorAndExit(s"Unrecognized option '$value'.")
+
       case value :: tail =>
-        if (inSparkOpts) {
-          value match {
-            // convert --foo=bar to --foo bar
-            case v if v.startsWith("--") && v.contains("=") && v.split("=").size == 2 =>
-              val parts = v.split("=")
-              parse(Seq(parts(0), parts(1)) ++ tail)
-            case v if v.startsWith("-") =>
-              val errMessage = s"Unrecognized option '$value'."
-              SparkSubmit.printErrorAndExit(errMessage)
-            case v =>
-              primaryResource =
-                if (!SparkSubmit.isShell(v) && !SparkSubmit.isInternal(v)) {
-                  Utils.resolveURI(v).toString
-                } else {
-                  v
-                }
-              inSparkOpts = false
-              isPython = SparkSubmit.isPython(v)
-              parse(tail)
-          }
+        primaryResource = if (!SparkSubmit.isShell(value) && !SparkSubmit.isInternal(value)) {
+          Utils.resolveURI(value).toString
         } else {
-          if (!value.isEmpty) {
-            childArgs += value
-          }
-          parse(tail)
+          value
         }
+        isPython = SparkSubmit.isPython(value)
+        childArgs ++= tail.filter(_.nonEmpty)
 
       case Nil =>
     }
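
To make the behavioural change concrete, here is a minimal sketch of the reworked fall-through case (object and variable names are illustrative; the `SparkSubmit.isShell`/`isInternal` checks and URI resolution are elided): anything that still looks like a flag fails fast, the first bare token becomes the primary resource, and the remaining non-empty tokens become the child arguments in one step, replacing the old `inSparkOpts` state flag.

```scala
// Simplified sketch of the new fall-through behaviour; names are illustrative
// and the SparkSubmit/Utils helpers are replaced with plain assignments.
object FallThroughDemo {
  def main(args: Array[String]): Unit = {
    val remaining = List("my-app.jar", "positional-1", "", "positional-2")
    remaining match {
      case value :: _ if value.startsWith("-") =>
        // An unrecognized flag is now rejected instead of being swallowed as a child arg.
        sys.error(s"Unrecognized option '$value'.")
      case value :: tail =>
        val primaryResource = value               // SparkSubmit would resolve this to a URI
        val childArgs = tail.filter(_.nonEmpty)   // everything after the resource, empties dropped
        println(s"primaryResource=$primaryResource, childArgs=$childArgs")
      case Nil =>
    }
  }
}
```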