Commit 6eaf7d0 (1 parent: 0faa3b6)

executorJavaOpts
6 files changed: +37 additions, -13 deletions

bin/compute-classpath.sh
Lines changed: 2 additions & 1 deletion

@@ -32,9 +32,10 @@ CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf"
 
 ASSEMBLY_DIR="$FWDIR/assembly/target/scala-$SCALA_VERSION"
 
+
+CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
 # First check if we have a dependencies jar. If so, include binary classes with the deps jar
 if [ -f "$ASSEMBLY_DIR"/spark-assembly*hadoop*-deps.jar ]; then
-  CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/classes"

core/src/main/scala/org/apache/spark/SparkConf.scala
Lines changed: 13 additions & 2 deletions

@@ -208,13 +208,24 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     new SparkConf(false).setAll(settings)
   }
 
-  /** Print any necessary deprecation warnings based on the values set in this configuration. */
-  private[spark] def printDeprecationWarnings() {
+  /** Checks for illegal or deprecated config settings. Throws an exception for the former. */
+  private[spark] def validateSettings() {
     if (settings.contains("spark.local.dir")) {
       val msg = "In Spark 1.0 and later spark.local.dir will be overridden by the value set by " +
         "the cluster manager (via SPARK_LOCAL_DIRS in mesos/standalone and LOCAL_DIRS in YARN)."
       logWarning(msg)
     }
+    val executorOptsKey = "spark.executor.extraJavaOptions"
+    settings.get(executorOptsKey).map { javaOpts =>
+      if (javaOpts.contains("-Dspark")) {
+        val msg = s"$executorOptsKey is not allowed to set Spark options. Was '$javaOpts'"
+        throw new Exception(msg)
+      }
+      if (javaOpts.contains("-Xmx") || javaOpts.contains("-Xms")) {
+        val msg = s"$executorOptsKey is not allowed to alter memory settings. Was '$javaOpts'"
+        throw new Exception(msg)
+      }
+    }
   }
 
   /**
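
For intuition, here is a minimal standalone sketch of the rule the new validateSettings() enforces. The ExecutorOptsCheck object and check() helper are hypothetical names for illustration; they are not part of Spark.

import scala.util.Try

// Hypothetical standalone mirror of the validateSettings() checks above.
object ExecutorOptsCheck {
  val executorOptsKey = "spark.executor.extraJavaOptions"

  def check(javaOpts: String): Unit = {
    // Spark properties belong in SparkConf, not in raw -D JVM flags.
    if (javaOpts.contains("-Dspark")) {
      throw new Exception(s"$executorOptsKey is not allowed to set Spark options. Was '$javaOpts'")
    }
    // Executor heap size is owned by spark.executor.memory, not -Xmx/-Xms.
    if (javaOpts.contains("-Xmx") || javaOpts.contains("-Xms")) {
      throw new Exception(s"$executorOptsKey is not allowed to alter memory settings. Was '$javaOpts'")
    }
  }

  def main(args: Array[String]): Unit = {
    println(Try(check("-verbose:gc -XX:+PrintGCDetails"))) // Success(()) -- GC flags pass
    println(Try(check("-Xmx4g")))                          // Failure(...) -- memory setting
    println(Try(check("-Dspark.driver.port=7078")))        // Failure(...) -- Spark option
  }
}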

core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
Lines changed: 4 additions & 2 deletions

@@ -42,9 +42,11 @@ private[spark] class SparkDeploySchedulerBackend(
 
     // The endpoint for executors to talk to us
     val driverUrl = "akka.tcp://spark@%s:%s/user/%s".format(
-      conf.get("spark.driver.host"), conf.get("spark.driver.port"),
+      conf.get("spark.driver.host"), conf.get("spark.driver.port"),
       CoarseGrainedSchedulerBackend.ACTOR_NAME)
-    val args = Seq(driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}", "{{CORES}}", "{{WORKER_URL}}")
+    val extraOpts = sc.conf.get("spark.executor.extraJavaOptions", "null")
+    val args = Seq(extraOpts, driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}",
+      "{{CORES}}", "{{WORKER_URL}}")
     val command = Command(
       "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs)
     val sparkHome = sc.getSparkHome()
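
A note on the "null" default above: passing the literal string "null" keeps the positional argument list handed to CoarseGrainedExecutorBackend the same length whether or not extra options are set. A small sketch with invented values (the assumption that the receiving side treats the "null" token as "no options" is mine):

// An empty map stands in for SparkConf here; all values are invented.
val conf = Map.empty[String, String]
val extraOpts = conf.getOrElse("spark.executor.extraJavaOptions", "null")
val driverUrl = "akka.tcp://spark@driver-host:7077/user/CoarseGrainedScheduler"
val args = Seq(extraOpts, driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}",
  "{{CORES}}", "{{WORKER_URL}}")
// args.head == "null" when the property is unset, so the remaining
// arguments keep fixed positions when the worker fills the {{...}} templates.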

core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala
Lines changed: 6 additions & 4 deletions

@@ -123,20 +123,22 @@ private[spark] class CoarseMesosSchedulerBackend(
       conf.get("spark.driver.host"),
       conf.get("spark.driver.port"),
       CoarseGrainedSchedulerBackend.ACTOR_NAME)
+    val extraOpts = conf.get("spark.executor.extraJavaOptions")
     val uri = conf.get("spark.executor.uri", null)
     if (uri == null) {
       val runScript = new File(sparkHome, "./bin/spark-class").getCanonicalPath
       command.setValue(
-        "\"%s\" org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d".format(
-          runScript, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
+        "\"%s\" org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %s %d".format(
+          runScript, extraOpts, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
     } else {
       // Grab everything to the first '.'. We'll use that and '*' to
       // glob the directory "correctly".
       val basename = uri.split('/').last.split('.').head
       command.setValue(
         ("cd %s*; " +
-          "./bin/spark-class org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %d")
-          .format(basename, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
+          "./bin/spark-class org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %s %d")
+          .format(basename, extraOpts, driverUrl, offer.getSlaveId.getValue,
+            offer.getHostname, numCores))
       command.addUris(CommandInfo.URI.newBuilder().setValue(uri))
     }
     command.build()
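
To see what the widened format string produces, a self-contained sketch of the non-URI branch with invented values:

// All values below are invented for illustration.
val runScript = "/opt/spark/bin/spark-class"
val extraOpts = "-XX:+UseConcMarkSweepGC"
val driverUrl = "akka.tcp://spark@driver-host:7077/user/CoarseGrainedScheduler"
val launch =
  "\"%s\" org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %s %d".format(
    runScript, extraOpts, driverUrl, "slave-01", "worker-host.example.com", 4)
println(launch)
// "/opt/spark/bin/spark-class" org.apache.spark.executor.CoarseGrainedExecutorBackend
//   -XX:+UseConcMarkSweepGC akka.tcp://spark@driver-host:7077/user/CoarseGrainedScheduler
//   slave-01 worker-host.example.com 4   (printed as a single line)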

docs/configuration.md
Lines changed: 9 additions & 0 deletions

@@ -646,6 +646,15 @@ Apart from these, the following properties are also available, and may be useful
     Number of cores to allocate for each task.
   </td>
 </tr>
+<tr>
+  <td>spark.executor.extraJavaOptions</td>
+  <td>(none)</td>
+  <td>
+    A string of extra JVM options to pass to executors. For instance, GC settings. Note that
+    it is illegal to set Spark properties or heap size settings with this flag.
+  </td>
+</tr>
+
 </table>
 
 ## Viewing Spark Properties
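
As a usage sketch for the documented property (the master URL, app name, and GC flags are illustrative, not prescribed):

import org.apache.spark.{SparkConf, SparkContext}

// Illustrative driver-side setup: forward GC tuning flags to executors.
// Putting -Xmx/-Xms or -Dspark.* here would now fail validateSettings().
val conf = new SparkConf()
  .setMaster("spark://master-host:7077")   // invented master URL
  .setAppName("ExtraJavaOptsDemo")         // invented app name
  .set("spark.executor.extraJavaOptions", "-XX:+UseConcMarkSweepGC -verbose:gc")
val sc = new SparkContext(conf)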

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
Lines changed: 3 additions & 4 deletions

@@ -58,10 +58,9 @@ trait ExecutorRunnableUtil extends Logging {
     val executorMemoryString = executorMemory + "m"
     JAVA_OPTS += "-Xms" + executorMemoryString + " -Xmx" + executorMemoryString + " "
 
-    /* Pass on Spark properties to the driver. */
-    for ((k, v) <- sys.props.filterKeys(_.startsWith("spark"))) {
-      JAVA_OPTS += s"-D$k=$v"
-    }
+    // Set extra Java options for the executor
+    val executorOpts = sys.props.find(_._1.contains("spark.executor.extraJavaOptions"))
+    JAVA_OPTS += executorOpts
 
     JAVA_OPTS += " -Djava.io.tmpdir=" +
       new Path(Environment.PWD.$(), YarnConfiguration.DEFAULT_CONTAINER_TEMP_DIR) + " "
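
One subtlety worth a sketch: sys.props behaves like a Map[String, String], so find returns an Option[(String, String)] whose toString includes the whole key/value pair. A hedged alternative that appends only the value, assuming the option arrives as a system property (illustrative, not the commit's code):

// Extract just the value; yields "" when the property is unset.
val executorOpts: String =
  sys.props.get("spark.executor.extraJavaOptions").map(_ + " ").getOrElse("")
// JAVA_OPTS += executorOpts  // appends only the flags, plus a trailing space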
