
Commit 24a9f80

Merge remote-tracking branch 'apache/master' into structfield-metadata

2 parents: 473a7c5 + 293a0b5
19 files changed: +503 −375 lines

bin/spark-shell.cmd

Lines changed: 3 additions & 2 deletions

@@ -17,6 +17,7 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-set SPARK_HOME=%~dp0..
+rem This is the entry point for running Spark shell. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.

-cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd --class org.apache.spark.repl.Main %* spark-shell
+cmd /V /E /C %~dp0spark-shell2.cmd %*
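
Both the shell and submit wrappers in this commit follow the same pattern: the outer script only launches the matching *2.cmd script in a child cmd process. In cmd.exe, /V enables delayed environment-variable expansion, /E enables command extensions, and /C runs the given command and then exits, so any set executed by the inner script stays confined to the child process and the caller's environment is left untouched.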

bin/spark-shell2.cmd

Lines changed: 22 additions & 0 deletions

@@ -0,0 +1,22 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+set SPARK_HOME=%~dp0..
+
+cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd --class org.apache.spark.repl.Main %* spark-shell

bin/spark-submit.cmd

Lines changed: 3 additions & 48 deletions

@@ -17,52 +17,7 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem

-rem NOTE: Any changes in this file must be reflected in SparkSubmitDriverBootstrapper.scala!
+rem This is the entry point for running Spark submit. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.

-set SPARK_HOME=%~dp0..
-set ORIG_ARGS=%*
-
-rem Reset the values of all variables used
-set SPARK_SUBMIT_DEPLOY_MODE=client
-set SPARK_SUBMIT_PROPERTIES_FILE=%SPARK_HOME%\conf\spark-defaults.conf
-set SPARK_SUBMIT_DRIVER_MEMORY=
-set SPARK_SUBMIT_LIBRARY_PATH=
-set SPARK_SUBMIT_CLASSPATH=
-set SPARK_SUBMIT_OPTS=
-set SPARK_SUBMIT_BOOTSTRAP_DRIVER=
-
-:loop
-if [%1] == [] goto continue
-if [%1] == [--deploy-mode] (
-  set SPARK_SUBMIT_DEPLOY_MODE=%2
-) else if [%1] == [--properties-file] (
-  set SPARK_SUBMIT_PROPERTIES_FILE=%2
-) else if [%1] == [--driver-memory] (
-  set SPARK_SUBMIT_DRIVER_MEMORY=%2
-) else if [%1] == [--driver-library-path] (
-  set SPARK_SUBMIT_LIBRARY_PATH=%2
-) else if [%1] == [--driver-class-path] (
-  set SPARK_SUBMIT_CLASSPATH=%2
-) else if [%1] == [--driver-java-options] (
-  set SPARK_SUBMIT_OPTS=%2
-)
-shift
-goto loop
-:continue
-
-rem For client mode, the driver will be launched in the same JVM that launches
-rem SparkSubmit, so we may need to read the properties file for any extra class
-rem paths, library paths, java options and memory early on. Otherwise, it will
-rem be too late by the time the driver JVM has started.
-
-if [%SPARK_SUBMIT_DEPLOY_MODE%] == [client] (
-  if exist %SPARK_SUBMIT_PROPERTIES_FILE% (
-    rem Parse the properties file only if the special configs exist
-    for /f %%i in ('findstr /r /c:"^[\t ]*spark.driver.memory" /c:"^[\t ]*spark.driver.extra" ^
-      %SPARK_SUBMIT_PROPERTIES_FILE%') do (
-      set SPARK_SUBMIT_BOOTSTRAP_DRIVER=1
-    )
-  )
-)
-
-cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.SparkSubmit %ORIG_ARGS%
+cmd /V /E /C %~dp0spark-submit2.cmd %*

bin/spark-submit2.cmd

Lines changed: 68 additions & 0 deletions

@@ -0,0 +1,68 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem NOTE: Any changes in this file must be reflected in SparkSubmitDriverBootstrapper.scala!
+
+set SPARK_HOME=%~dp0..
+set ORIG_ARGS=%*
+
+rem Reset the values of all variables used
+set SPARK_SUBMIT_DEPLOY_MODE=client
+set SPARK_SUBMIT_PROPERTIES_FILE=%SPARK_HOME%\conf\spark-defaults.conf
+set SPARK_SUBMIT_DRIVER_MEMORY=
+set SPARK_SUBMIT_LIBRARY_PATH=
+set SPARK_SUBMIT_CLASSPATH=
+set SPARK_SUBMIT_OPTS=
+set SPARK_SUBMIT_BOOTSTRAP_DRIVER=
+
+:loop
+if [%1] == [] goto continue
+if [%1] == [--deploy-mode] (
+  set SPARK_SUBMIT_DEPLOY_MODE=%2
+) else if [%1] == [--properties-file] (
+  set SPARK_SUBMIT_PROPERTIES_FILE=%2
+) else if [%1] == [--driver-memory] (
+  set SPARK_SUBMIT_DRIVER_MEMORY=%2
+) else if [%1] == [--driver-library-path] (
+  set SPARK_SUBMIT_LIBRARY_PATH=%2
+) else if [%1] == [--driver-class-path] (
+  set SPARK_SUBMIT_CLASSPATH=%2
+) else if [%1] == [--driver-java-options] (
+  set SPARK_SUBMIT_OPTS=%2
+)
+shift
+goto loop
+:continue
+
+rem For client mode, the driver will be launched in the same JVM that launches
+rem SparkSubmit, so we may need to read the properties file for any extra class
+rem paths, library paths, java options and memory early on. Otherwise, it will
+rem be too late by the time the driver JVM has started.
+
+if [%SPARK_SUBMIT_DEPLOY_MODE%] == [client] (
+  if exist %SPARK_SUBMIT_PROPERTIES_FILE% (
+    rem Parse the properties file only if the special configs exist
+    for /f %%i in ('findstr /r /c:"^[\t ]*spark.driver.memory" /c:"^[\t ]*spark.driver.extra" ^
+      %SPARK_SUBMIT_PROPERTIES_FILE%') do (
+      set SPARK_SUBMIT_BOOTSTRAP_DRIVER=1
+    )
+  )
+)
+
+cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.SparkSubmit %ORIG_ARGS%
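
The bootstrap check at the end relies on findstr's output: for /f runs the quoted findstr command and iterates over its matching lines, so if the properties file contains any spark.driver.memory or spark.driver.extra* entry the loop body fires at least once and SPARK_SUBMIT_BOOTSTRAP_DRIVER is set to 1. Per the NOTE at the top of the file, this logic must stay in sync with SparkSubmitDriverBootstrapper.scala, which does the equivalent parsing on the JVM side (see its diff below).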

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 3 additions & 39 deletions

@@ -17,14 +17,11 @@

 package org.apache.spark.deploy

-import java.io.{File, FileInputStream, IOException}
-import java.util.Properties
 import java.util.jar.JarFile

 import scala.collection.JavaConversions._
 import scala.collection.mutable.{ArrayBuffer, HashMap}

-import org.apache.spark.SparkException
 import org.apache.spark.util.Utils

 /**
@@ -63,9 +60,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
     val defaultProperties = new HashMap[String, String]()
     if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
     Option(propertiesFile).foreach { filename =>
-      val file = new File(filename)
-      SparkSubmitArguments.getPropertiesFromFile(file).foreach { case (k, v) =>
-        if (k.startsWith("spark")) {
+      Utils.getPropertiesFromFile(filename).foreach { case (k, v) =>
+        if (k.startsWith("spark.")) {
           defaultProperties(k) = v
           if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
         } else {
@@ -90,19 +86,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
    */
  private def mergeSparkProperties(): Unit = {
    // Use common defaults file, if not specified by user
-    if (propertiesFile == null) {
-      val sep = File.separator
-      val sparkHomeConfig = env.get("SPARK_HOME").map(sparkHome => s"${sparkHome}${sep}conf")
-      val confDir = env.get("SPARK_CONF_DIR").orElse(sparkHomeConfig)
-
-      confDir.foreach { sparkConfDir =>
-        val defaultPath = s"${sparkConfDir}${sep}spark-defaults.conf"
-        val file = new File(defaultPath)
-        if (file.exists()) {
-          propertiesFile = file.getAbsolutePath
-        }
-      }
-    }
+    propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile(env))

    val properties = HashMap[String, String]()
    properties.putAll(defaultSparkProperties)
@@ -397,23 +381,3 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
     SparkSubmit.exitFn()
   }
 }
-
-object SparkSubmitArguments {
-  /** Load properties present in the given file. */
-  def getPropertiesFromFile(file: File): Seq[(String, String)] = {
-    require(file.exists(), s"Properties file $file does not exist")
-    require(file.isFile(), s"Properties file $file is not a normal file")
-    val inputStream = new FileInputStream(file)
-    try {
-      val properties = new Properties()
-      properties.load(inputStream)
-      properties.stringPropertyNames().toSeq.map(k => (k, properties(k).trim))
-    } catch {
-      case e: IOException =>
-        val message = s"Failed when loading Spark properties file $file"
-        throw new SparkException(message, e)
-    } finally {
-      inputStream.close()
-    }
-  }
-}
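
The two pieces deleted above — the spark-defaults.conf lookup inside mergeSparkProperties and the getPropertiesFromFile helper on the companion object — are consolidated into org.apache.spark.util.Utils. The Utils side is not part of this commit; what follows is a minimal sketch of what the consolidated helpers might look like, reconstructed from the removed logic (method names are taken from the call sites in this diff; everything else, including the omitted IOException-to-SparkException wrapping, is an assumption):

import java.io.{File, FileInputStream}
import java.util.Properties
import scala.collection.JavaConversions._

// Sketch: load a .properties file into an immutable Map, trimming values.
// Mirrors the removed SparkSubmitArguments.getPropertiesFromFile, but takes a
// path String and returns a Map, matching the new call sites.
def getPropertiesFromFile(filename: String): Map[String, String] = {
  val file = new File(filename)
  require(file.exists(), s"Properties file $file does not exist")
  require(file.isFile(), s"Properties file $file is not a normal file")
  val inputStream = new FileInputStream(file)
  try {
    val properties = new Properties()
    properties.load(inputStream)
    properties.stringPropertyNames().map(k => (k, properties.getProperty(k).trim)).toMap
  } finally {
    inputStream.close()
  }
}

// Sketch: resolve SPARK_CONF_DIR (or SPARK_HOME/conf) to spark-defaults.conf,
// mirroring the branch removed from mergeSparkProperties; null if absent.
def getDefaultPropertiesFile(env: Map[String, String] = sys.env): String = {
  env.get("SPARK_CONF_DIR")
    .orElse(env.get("SPARK_HOME").map(h => s"$h${File.separator}conf"))
    .map(dir => new File(s"$dir${File.separator}spark-defaults.conf"))
    .filter(_.isFile)
    .map(_.getAbsolutePath)
    .orNull
}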

core/src/main/scala/org/apache/spark/deploy/SparkSubmitDriverBootstrapper.scala

Lines changed: 1 addition & 1 deletion

@@ -68,7 +68,7 @@ private[spark] object SparkSubmitDriverBootstrapper {
   assume(bootstrapDriver != null, "SPARK_SUBMIT_BOOTSTRAP_DRIVER must be set")

   // Parse the properties file for the equivalent spark.driver.* configs
-  val properties = SparkSubmitArguments.getPropertiesFromFile(new File(propertiesFile)).toMap
+  val properties = Utils.getPropertiesFromFile(propertiesFile)
   val confDriverMemory = properties.get("spark.driver.memory")
   val confLibraryPath = properties.get("spark.driver.extraLibraryPath")
   val confClasspath = properties.get("spark.driver.extraClassPath")
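
Worth noting at this call site: the removed helper took a java.io.File and returned Seq[(String, String)] (hence the old .toMap), while the replacement takes the path as a String and is used directly as a Map. A sketch of the two signatures, the second inferred from this diff rather than shown in it:

// Old, on the SparkSubmitArguments companion object (removed in this commit):
def getPropertiesFromFile(file: File): Seq[(String, String)]

// New, on Utils (inferred from the call sites; not part of this diff):
def getPropertiesFromFile(filename: String): Map[String, String]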

core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala

Lines changed: 15 additions & 1 deletion

@@ -18,12 +18,14 @@
 package org.apache.spark.deploy.history

 import org.apache.spark.SparkConf
+import org.apache.spark.util.Utils

 /**
  * Command-line parser for the master.
  */
 private[spark] class HistoryServerArguments(conf: SparkConf, args: Array[String]) {
   private var logDir: String = null
+  private var propertiesFile: String = null

   parse(args.toList)

@@ -32,22 +34,34 @@ private[spark] class HistoryServerArguments(conf: SparkConf, args: Array[String]
     case ("--dir" | "-d") :: value :: tail =>
       logDir = value
       conf.set("spark.history.fs.logDirectory", value)
+      System.setProperty("spark.history.fs.logDirectory", value)
       parse(tail)

     case ("--help" | "-h") :: tail =>
       printUsageAndExit(0)

+    case ("--properties-file") :: value :: tail =>
+      propertiesFile = value
+      parse(tail)
+
     case Nil =>

     case _ =>
       printUsageAndExit(1)
     }
   }

+  // This mutates the SparkConf, so all accesses to it must be made after this line
+  Utils.loadDefaultSparkProperties(conf, propertiesFile)
+
   private def printUsageAndExit(exitCode: Int) {
     System.err.println(
       """
-      |Usage: HistoryServer
+      |Usage: HistoryServer [options]
+      |
+      |Options:
+      |  --properties-file FILE   Path to a custom Spark properties file.
+      |                           Default is conf/spark-defaults.conf.
       |
       |Configuration options can be set by setting the corresponding JVM system property.
       |History Server options are always available; additional options depend on the provider.
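
The history server, master, and worker argument parsers (this file and the two below) all now call Utils.loadDefaultSparkProperties(conf, propertiesFile) once parsing finishes. That method is not part of this commit either; here is a minimal sketch of the behavior its call sites imply — it mutates the SparkConf, honors an explicit --properties-file, falls back to the default file, and (as the MasterArguments change below shows) returns the path it actually used — assuming the helpers sketched earlier and a command-line-wins precedence:

// Sketch only; the real Utils implementation is not shown in this diff.
def loadDefaultSparkProperties(conf: SparkConf, filePath: String = null): String = {
  val path = Option(filePath).getOrElse(getDefaultPropertiesFile())
  Option(path).foreach { confFile =>
    getPropertiesFromFile(confFile).foreach { case (k, v) =>
      if (k.startsWith("spark.")) {
        // Assumption: values already set (e.g. from the command line) win.
        conf.setIfMissing(k, v)
        sys.props.getOrElseUpdate(k, v)
      }
    }
  }
  path
}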

core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala

Lines changed: 15 additions & 4 deletions

@@ -27,6 +27,7 @@ private[spark] class MasterArguments(args: Array[String], conf: SparkConf) {
   var host = Utils.localHostName()
   var port = 7077
   var webUiPort = 8080
+  var propertiesFile: String = null

   // Check for settings in environment variables
   if (System.getenv("SPARK_MASTER_HOST") != null) {
@@ -38,12 +39,16 @@
   if (System.getenv("SPARK_MASTER_WEBUI_PORT") != null) {
     webUiPort = System.getenv("SPARK_MASTER_WEBUI_PORT").toInt
   }
+
+  parse(args.toList)
+
+  // This mutates the SparkConf, so all accesses to it must be made after this line
+  propertiesFile = Utils.loadDefaultSparkProperties(conf, propertiesFile)
+
   if (conf.contains("spark.master.ui.port")) {
     webUiPort = conf.get("spark.master.ui.port").toInt
   }

-  parse(args.toList)
-
   def parse(args: List[String]): Unit = args match {
     case ("--ip" | "-i") :: value :: tail =>
       Utils.checkHost(value, "ip no longer supported, please use hostname " + value)
@@ -63,7 +68,11 @@
       webUiPort = value
       parse(tail)

-    case ("--help" | "-h") :: tail =>
+    case ("--properties-file") :: value :: tail =>
+      propertiesFile = value
+      parse(tail)
+
+    case ("--help") :: tail =>
       printUsageAndExit(0)

     case Nil => {}
@@ -83,7 +92,9 @@
       " -i HOST, --ip HOST Hostname to listen on (deprecated, please use --host or -h) \n" +
       " -h HOST, --host HOST Hostname to listen on\n" +
       " -p PORT, --port PORT Port to listen on (default: 7077)\n" +
-      " --webui-port PORT Port for web UI (default: 8080)")
+      " --webui-port PORT Port for web UI (default: 8080)\n" +
+      " --properties-file FILE Path to a custom Spark properties file.\n" +
+      "                        Default is conf/spark-defaults.conf.")
     System.exit(exitCode)
   }
 }

core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala

Lines changed: 16 additions & 5 deletions

@@ -33,6 +33,7 @@ private[spark] class WorkerArguments(args: Array[String], conf: SparkConf) {
   var memory = inferDefaultMemory()
   var masters: Array[String] = null
   var workDir: String = null
+  var propertiesFile: String = null

   // Check for settings in environment variables
   if (System.getenv("SPARK_WORKER_PORT") != null) {
@@ -47,15 +48,19 @@
   if (System.getenv("SPARK_WORKER_WEBUI_PORT") != null) {
     webUiPort = System.getenv("SPARK_WORKER_WEBUI_PORT").toInt
   }
-  if (conf.contains("spark.worker.ui.port")) {
-    webUiPort = conf.get("spark.worker.ui.port").toInt
-  }
   if (System.getenv("SPARK_WORKER_DIR") != null) {
     workDir = System.getenv("SPARK_WORKER_DIR")
   }

   parse(args.toList)

+  // This mutates the SparkConf, so all accesses to it must be made after this line
+  propertiesFile = Utils.loadDefaultSparkProperties(conf, propertiesFile)
+
+  if (conf.contains("spark.worker.ui.port")) {
+    webUiPort = conf.get("spark.worker.ui.port").toInt
+  }
+
   checkWorkerMemory()

   def parse(args: List[String]): Unit = args match {
@@ -89,7 +94,11 @@
       webUiPort = value
       parse(tail)

-    case ("--help" | "-h") :: tail =>
+    case ("--properties-file") :: value :: tail =>
+      propertiesFile = value
+      parse(tail)
+
+    case ("--help") :: tail =>
       printUsageAndExit(0)

     case value :: tail =>
@@ -124,7 +133,9 @@
       " -i HOST, --ip IP Hostname to listen on (deprecated, please use --host or -h)\n" +
       " -h HOST, --host HOST Hostname to listen on\n" +
       " -p PORT, --port PORT Port to listen on (default: random)\n" +
-      " --webui-port PORT Port for web UI (default: 8081)")
+      " --webui-port PORT Port for web UI (default: 8081)\n" +
+      " --properties-file FILE Path to a custom Spark properties file.\n" +
+      "                        Default is conf/spark-defaults.conf.")
     System.exit(exitCode)
   }
