[SPARK-2165] spark on yarn: add support for setting maxAppAttempts in the ApplicationSubmissionContext #1279
Changes from 5 commits
@@ -124,6 +124,14 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     set("spark.home", home)
   }

+  /**
+   * Set the max number of submission retries the Spark client will attempt
+   * before giving up
+   */
+  def setMaxAppAttempts(max: Int): SparkConf = {
+    set("spark.maxappattempts", max.toString())
+  }
+
   /** Set multiple parameters together */
   def setAll(settings: Traversable[(String, String)]) = {
     this.settings ++= settings

@@ -167,6 +175,8 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     getOption(key).map(_.toInt).getOrElse(defaultValue)
   }

+  def getIntOption(key: String): Option[Int] = getOption(key).map(_.toInt)
+
   /** Get a parameter as a long, falling back to a default if not set */
   def getLong(key: String, defaultValue: Long): Long = {
     getOption(key).map(_.toLong).getOrElse(defaultValue)

Review comment on the new getIntOption: To keep things consistent (these APIs are public) I don't think we should add the getIntOption without adding other routines like getLongOption, etc. For now can you just use getOption and then make it an Int.
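For context, a minimal, self-contained sketch of what this review comment asks for: read the value with the existing getOption and convert it, instead of adding a new public getIntOption accessor. The object name, the value "3", and the printed messages are illustrative only; the config key is the one this PR introduces.

import org.apache.spark.SparkConf

object MaxAppAttemptsExample {
  def main(args: Array[String]): Unit = {
    // Set the proposed key through the generic setter; "3" is an arbitrary value.
    val sparkConf = new SparkConf(loadDefaults = false).set("spark.maxappattempts", "3")

    // Reviewer's suggestion: reuse getOption and convert, no new accessor needed.
    val maxAppAttempts: Option[Int] =
      sparkConf.getOption("spark.maxappattempts").map(_.toInt)

    maxAppAttempts match {
      case Some(v) => println(s"Would call appContext.setMaxAppAttempts($v)")
      case None    => println("Not setting max app attempts; the cluster default would apply.")
    }
  }
}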
yarn alpha Client:

@@ -108,6 +108,10 @@ class Client(clientArgs: ClientArguments, hadoopConf: Configuration, spConf: Spa
     val appContext = Records.newRecord(classOf[ApplicationSubmissionContext])
     appContext.setApplicationId(appId)
     appContext.setApplicationName(args.appName)
+    sparkConf.getIntOption("spark.maxappattempts") match {
+      case Some(v) => appContext.setMaxAppAttempts(v)
+      case None => logDebug("Not setting max app attempts.")
+    }
     appContext
   }

Review comment on the setMaxAppAttempts call: hadoop 0.23 (yarn alpha) doesn't have a setMaxAppAttempts routine. Just remove this and only do it in the yarn stable version.
yarn stable Client:

@@ -81,6 +81,10 @@ class Client(clientArgs: ClientArguments, hadoopConf: Configuration, spConf: Spa
     appContext.setQueue(args.amQueue)
     appContext.setAMContainerSpec(amContainer)
     appContext.setApplicationType("SPARK")
+    sparkConf.getIntOption("spark.maxappattempts") match {
+      case Some(v) => appContext.setMaxAppAttempts(v)
+      case None => logDebug("Not setting max app attempts.")
+    }

     // Memory for the ApplicationMaster.
     val memoryResource = Records.newRecord(classOf[Resource]).asInstanceOf[Resource]

Review comment on the logDebug: Can you add something like "cluster default setting will be used" to the log statement?
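A possible follow-up to the comment above, shown only as a sketch: the same match with a log message along the lines the reviewer asks for. sparkConf, appContext, and logDebug are the names already in scope in this Client, and the exact wording of the message is an assumption.

    // Sketch: exact message wording is an assumption based on the review comment.
    sparkConf.getOption("spark.maxappattempts").map(_.toInt) match {
      case Some(v) => appContext.setMaxAppAttempts(v)
      case None => logDebug("spark.maxappattempts is not set. " +
        "Cluster default setting will be used.")
    }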
Review comment on the new setMaxAppAttempts setter in SparkConf: We haven't been adding specific routines to set the configs. The user can just set it using the existing SparkConf.set routines, so I think we should remove this.
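To illustrate the point, a short sketch of how a caller could set the value today without a dedicated setter. The master URL and app name are placeholders, and the key is the one proposed in this PR.

import org.apache.spark.SparkConf

// No dedicated setter needed: the generic set() already covers the new key.
val conf = new SparkConf()
  .setMaster("yarn-cluster")           // placeholder master
  .setAppName("MaxAppAttemptsDemo")    // placeholder app name
  .set("spark.maxappattempts", "2")    // key proposed in this PR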