
Commit ace4ead

Responses to review feedback.
1 parent b72d183 commit ace4ead


12 files changed: +31 -23 lines changed


conf/spark-env.sh.template

Lines changed: 7 additions & 7 deletions
@@ -1,10 +1,10 @@
 #!/usr/bin/env bash
 
-# This file is sourced when running various Spark classes.
+# This file is sourced when running various Spark programs.
 # Copy it as spark-env.sh and edit that to configure Spark for your site.
 
 # Options read when launching programs locally with
-# ./bin/spark-example or ./bin/spark-submit
+# ./bin/run-example or ./bin/spark-submit
 # - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
 # - SPARK_PUBLIC_DNS, to set the public dns name of the driver program
 # - SPARK_CLASSPATH, default classpath entries to append
@@ -13,15 +13,15 @@
 # - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
 # - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program
 # - SPARK_CLASSPATH, default classpath entries to append
-# - SPARK_LOCAL_DIRS, shuffle directories to use on this node
+# - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data
 # - MESOS_NATIVE_LIBRARY, to point to your libmesos.so if you use Mesos
 
 # Options read in YARN client mode
 # - SPARK_YARN_APP_JAR, Path to your application’s JAR file (required)
-# - SPARK_WORKER_INSTANCES, Number of workers to start (Default: 2)
-# - SPARK_WORKER_CORES, Number of cores for the workers (Default: 1).
-# - SPARK_WORKER_MEMORY, Memory per Worker (e.g. 1000M, 2G) (Default: 1G)
-# - SPARK_MASTER_MEMORY, Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb)
+# - SPARK_EXECUTOR_INSTANCES, Number of workers to start (Default: 2)
+# - SPARK_EXECUTOR_CORES, Number of cores for the workers (Default: 1).
+# - SPARK_EXECUTOR_MEMORY, Memory per Worker (e.g. 1000M, 2G) (Default: 1G)
+# - SPARK_DRIVER_MEMORY, Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb)
 # - SPARK_YARN_APP_NAME, The name of your application (Default: Spark)
 # - SPARK_YARN_QUEUE, The hadoop queue to use for allocation requests (Default: ‘default’)
 # - SPARK_YARN_DIST_FILES, Comma separated list of files to be distributed with the job.
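
As a rough illustration of what these renamed variables drive, here is a minimal Scala sketch (not the actual YARN client code; how the variables are consumed is an assumption) that reads them with the defaults documented above:

object YarnEnvExample {
  def main(args: Array[String]): Unit = {
    // Fall back to the documented defaults when a variable is unset.
    val executorInstances = sys.env.getOrElse("SPARK_EXECUTOR_INSTANCES", "2").toInt
    val executorCores = sys.env.getOrElse("SPARK_EXECUTOR_CORES", "1").toInt
    val executorMemory = sys.env.getOrElse("SPARK_EXECUTOR_MEMORY", "1G")
    val driverMemory = sys.env.getOrElse("SPARK_DRIVER_MEMORY", "512M")
    println(s"$executorInstances executors x $executorCores cores, $executorMemory each; driver: $driverMemory")
  }
}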

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 2 additions & 1 deletion
@@ -224,7 +224,8 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
         throw new Exception(msg)
       }
       if (javaOpts.contains("-Xmx") || javaOpts.contains("-Xms")) {
-        val msg = s"$executorOptsKey is not allowed to alter memory settings. Was '$javaOpts'"
+        val msg = s"$executorOptsKey is not allowed to alter memory settings (was '$javaOpts'). Please use " +
+          "spark.executor.memory."
         throw new Exception(msg)
       }
     }
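
As a usage sketch of the rule this message enforces (flag values invented for illustration): JVM flags other than heap settings go in spark.executor.extraJavaOptions, while heap size is set only through spark.executor.memory.

import org.apache.spark.SparkConf

object ExecutorOptsExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(false)
      .set("spark.executor.memory", "2g")                            // heap size goes here
      .set("spark.executor.extraJavaOptions", "-XX:+PrintGCDetails") // other JVM flags go here
    // Putting "-Xmx4g" into spark.executor.extraJavaOptions instead would be
    // rejected by the check above when the configuration is validated.
    println(conf.get("spark.executor.extraJavaOptions"))
  }
}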

core/src/main/scala/org/apache/spark/deploy/Client.scala

Lines changed: 1 addition & 1 deletion
@@ -60,7 +60,7 @@ private class ClientActor(driverArgs: ClientArguments, conf: SparkConf) extends
       val libraryPathEntries = sys.props.get("spark.driver.libraryPath").toSeq.flatMap { cp =>
         cp.split(java.io.File.pathSeparator)
       }
-      val javaOpts = sys.props.get("spark.driver.javaOpts").toSeq
+      val javaOpts = sys.props.get("spark.driver.extraJavaOptions")
       val command = new Command(mainClass, Seq("{{WORKER_URL}}", driverArgs.mainClass) ++
         driverArgs.driverOptions, env, classPathEntries, libraryPathEntries, javaOpts)

core/src/main/scala/org/apache/spark/deploy/Command.scala

Lines changed: 1 addition & 1 deletion
@@ -25,5 +25,5 @@ private[spark] case class Command(
     environment: Map[String, String],
     classPathEntries: Seq[String],
     libraryPathEntries: Seq[String],
-    javaOptions: Seq[String]) {
+    extraJavaOptions: Option[String] = None) {
 }
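
A small self-contained sketch of the changed call shape, using a local stand-in for the case class (field order inferred from the call sites in this commit; names and values are made up):

object CommandShapeExample {
  // Stand-in mirroring org.apache.spark.deploy.Command after this change.
  case class Command(
      mainClass: String,
      arguments: Seq[String],
      environment: Map[String, String],
      classPathEntries: Seq[String],
      libraryPathEntries: Seq[String],
      extraJavaOptions: Option[String] = None)

  def main(args: Array[String]): Unit = {
    // All extra JVM flags travel as one space-separated string.
    val withOpts = Command("org.example.Main", Seq("--arg", "value"), Map(),
      Seq(), Seq(), Some("-verbose:gc -XX:+PrintGCDetails"))
    // Callers that set no flags can simply omit the last argument.
    val withoutOpts = Command("org.example.Main", Seq(), Map(), Seq(), Seq())
    println(withOpts.extraJavaOptions)
    println(withoutOpts.extraJavaOptions)
  }
}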

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 8 additions & 2 deletions
@@ -17,7 +17,7 @@
 
 package org.apache.spark.deploy
 
-import java.io.{FileInputStream, PrintStream, File}
+import java.io.{IOException, FileInputStream, PrintStream, File}
 import java.net.URL
 import java.util.Properties
 
@@ -27,6 +27,7 @@ import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.Map
+import org.apache.spark.SparkException
 
 /**
  * Scala code behind the spark-submit script. The script handles setting up the classpath with
@@ -252,9 +253,14 @@
   }
 
   private def getDefaultProperties(file: File): Seq[(String, String)] = {
+    require(file.exists(), s"Default properties file ${file.getName} does not exist")
     val inputStream = new FileInputStream(file)
     val properties = new Properties()
-    properties.load(inputStream)
+    try {
+      properties.load(inputStream)
+    } catch {
+      case e: IOException => throw new SparkException(s"Failed when loading Spark properties file ${file.getName}", e)
+    }
     properties.stringPropertyNames().toSeq.map(k => (k, properties(k)))
   }
 }
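
For reference, a standalone sketch of the same load-and-convert pattern against a throwaway file (paths and keys invented; the real method reads the spark-submit defaults file):

import java.io.{File, FileInputStream, IOException, PrintWriter}
import java.util.Properties
import scala.collection.JavaConversions._

object DefaultPropertiesSketch {
  def main(args: Array[String]): Unit = {
    // java.util.Properties accepts "key value", "key=value", or "key: value" lines.
    val file = File.createTempFile("spark-defaults", ".conf")
    val out = new PrintWriter(file)
    out.println("spark.executor.memory 2g")
    out.println("spark.executor.extraJavaOptions -verbose:gc")
    out.close()

    require(file.exists(), s"Default properties file ${file.getName} does not exist")
    val properties = new Properties()
    val inputStream = new FileInputStream(file)
    try {
      properties.load(inputStream)
    } catch {
      case e: IOException =>
        throw new RuntimeException(s"Failed when loading properties file ${file.getName}", e)
    } finally {
      inputStream.close()
    }
    // Same conversion as getDefaultProperties: every key becomes a (key, value) pair.
    val pairs: Seq[(String, String)] =
      properties.stringPropertyNames().toSeq.map(k => (k, properties.getProperty(k)))
    pairs.foreach { case (k, v) => println(s"$k -> $v") }
  }
}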

core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala

Lines changed: 1 addition & 1 deletion
@@ -50,7 +50,7 @@ private[spark] object TestClient {
       conf = conf, securityManager = new SecurityManager(conf))
     val desc = new ApplicationDescription(
       "TestClient", Some(1), 512, Command("spark.deploy.client.TestExecutor", Seq(), Map(), Seq(),
-      Seq(), Seq()), Some("dummy-spark-home"), "ignored")
+      Seq()), Some("dummy-spark-home"), "ignored")
     val listener = new TestListener
     val client = new AppClient(actorSystem, Array(url), desc, listener, new SparkConf)
     client.start()

core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala

Lines changed: 3 additions & 1 deletion
@@ -47,6 +47,8 @@ object CommandUtils extends Logging {
    */
   def buildJavaOpts(command: Command, memory: Int, sparkHome: String): Seq[String] = {
     val memoryOpts = Seq(s"-Xms${memory}M", s"-Xmx${memory}M")
+    // Note, this will coalesce multiple options into a single command component
+    val extraOpts = command.extraJavaOptions.toSeq
     val libraryOpts =
       if (command.libraryPathEntries.size > 0) {
         val joined = command.libraryPathEntries.mkString(File.pathSeparator)
@@ -63,7 +65,7 @@
     val userClassPath = command.classPathEntries.mkString(File.pathSeparator)
     val classPathWithUser = classPath + File.pathSeparator + userClassPath
 
-    Seq("-cp", classPathWithUser) ++ libraryOpts ++ memoryOpts ++ command.javaOptions
+    Seq("-cp", classPathWithUser) ++ libraryOpts ++ extraOpts ++ memoryOpts
   }
 
   /** Spawn a thread that will redirect a given stream to a file */
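
To make the coalescing note concrete, a tiny sketch (option strings invented) showing that Option#toSeq contributes at most one element, so several space-separated flags ride along as a single command component:

object ExtraOptsCoalescing {
  def main(args: Array[String]): Unit = {
    val extraJavaOptions: Option[String] = Some("-verbose:gc -XX:+PrintGCDetails")
    val extraOpts = extraJavaOptions.toSeq        // Seq("-verbose:gc -XX:+PrintGCDetails")
    val memoryOpts = Seq("-Xms512M", "-Xmx512M")
    // Both flags stay fused in one element rather than becoming two arguments.
    println(Seq("-cp", "app.jar") ++ extraOpts ++ memoryOpts)
    // None.toSeq would contribute nothing at all.
    println(Seq("-cp", "app.jar") ++ (None: Option[String]).toSeq ++ memoryOpts)
  }
}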

core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala

Lines changed: 2 additions & 2 deletions
@@ -74,14 +74,14 @@ private[spark] class DriverRunner(
 
     // Make sure user application jar is on the classpath
     // TODO: If we add ability to submit multiple jars they should also be added here
-    val classPath = driverDesc.command.classPathEntries ++ Seq(s":$localJarFilename")
+    val classPath = driverDesc.command.classPathEntries ++ Seq(s"$localJarFilename")
     val newCommand = Command(
       driverDesc.command.mainClass,
       driverDesc.command.arguments.map(substituteVariables),
       driverDesc.command.environment,
       classPath,
       driverDesc.command.libraryPathEntries,
-      driverDesc.command.javaOptions)
+      driverDesc.command.extraJavaOptions)
     val command = CommandUtils.buildCommandSeq(newCommand, driverDesc.mem,
       sparkHome.getAbsolutePath)
     launchDriver(command, driverDesc.command.environment, driverDir, driverDesc.supervise)

core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala

Lines changed: 1 addition & 1 deletion
@@ -101,7 +101,7 @@ private[spark] class ExecutorRunner(
     val command = Command(appDesc.command.mainClass,
       appDesc.command.arguments.map(substituteVariables) ++ Seq(appId), appDesc.command.environment,
       appDesc.command.classPathEntries, appDesc.command.libraryPathEntries,
-      appDesc.command.javaOptions)
+      appDesc.command.extraJavaOptions)
     CommandUtils.buildCommandSeq(command, memory, sparkHome.getAbsolutePath)
   }

core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala

Lines changed: 3 additions & 3 deletions
@@ -44,14 +44,14 @@ private[spark] class SparkDeploySchedulerBackend(
     val driverUrl = "akka.tcp://spark@%s:%s/user/%s".format(
       conf.get("spark.driver.host"), conf.get("spark.driver.port"),
       CoarseGrainedSchedulerBackend.ACTOR_NAME)
-    val args = sc.conf.get("spark.executor.extraJavaOptions").split(" ") ++
-      Seq(driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}",
+    val args = Seq(driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}",
       "{{CORES}}", "{{WORKER_URL}}")
+    val extraJavaOpts = sc.conf.getOption("spark.executor.extraJavaOptions")
 
     // TODO (pwendell) LOOK AT THIS
     val command = Command(
       "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs,
-      Seq(), Seq(), Seq())
+      Seq(), Seq(), extraJavaOpts)
     val sparkHome = sc.getSparkHome()
     val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
       sparkHome, sc.ui.appUIAddress, sc.eventLogger.map(_.logDir))
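
The move from conf.get to conf.getOption matters when the key is unset: get throws for a missing key, while getOption returns None, so executors can still be launched with no extra Java options configured. A minimal sketch (values invented):

import org.apache.spark.SparkConf

object GetOptionSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(false)
    println(conf.getOption("spark.executor.extraJavaOptions")) // None, no exception
    conf.set("spark.executor.extraJavaOptions", "-verbose:gc")
    println(conf.getOption("spark.executor.extraJavaOptions")) // Some(-verbose:gc)
  }
}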

0 commit comments
