Skip to content

Commit 49ef70e

Browse files
committed
Refactor getDefaultPropertiesFile
1 parent c45d20c commit 49ef70e

File tree

2 files changed

+9
-19
lines changed

2 files changed

+9
-19
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
8686
*/
8787
private def mergeSparkProperties(): Unit = {
8888
// Use common defaults file, if not specified by user
89-
propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile)
89+
propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile(env))
9090

9191
val properties = HashMap[String, String]()
9292
properties.putAll(defaultSparkProperties)

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 8 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1417,7 +1417,7 @@ private[spark] object Utils extends Logging {
14171417
* already set. Return the path of the properties file used.
14181418
*/
14191419
def loadDefaultSparkProperties(conf: SparkConf, filePath: String = null): String = {
1420-
val path = Option(filePath).getOrElse(getDefaultPropertiesFile)
1420+
val path = Option(filePath).getOrElse(getDefaultPropertiesFile())
14211421
Option(path).foreach { confFile =>
14221422
getPropertiesFromFile(confFile).filter { case (k, v) =>
14231423
k.startsWith("spark.")
@@ -1449,23 +1449,13 @@ private[spark] object Utils extends Logging {
14491449
}
14501450

14511451
/** Return the path of the default Spark properties file. */
1452-
def getDefaultPropertiesFile(): String = {
1453-
val s = File.separator
1454-
def getAbsolutePath(filePath: String): String = {
1455-
Option(filePath)
1456-
.map(t => new File(t))
1457-
.filter(_.isFile)
1458-
.map(_.getAbsolutePath).orNull
1459-
}
1460-
1461-
val configFile = sys.env.get("SPARK_CONF_DIR")
1462-
.map(t => s"$t${s}spark-defaults.conf")
1463-
.map(getAbsolutePath).orNull
1464-
1465-
Option(configFile).getOrElse(sys.env.get("SPARK_HOME")
1466-
.map(t => s"${t}${s}conf${s}spark-defaults.conf")
1467-
.map(getAbsolutePath)
1468-
.orNull)
1452+
def getDefaultPropertiesFile(env: Map[String, String] = sys.env): String = {
1453+
env.get("SPARK_CONF_DIR")
1454+
.orElse(env.get("SPARK_HOME").map { t => s"$t${File.separator}conf" })
1455+
.map { t => new File(s"$t${File.separator}spark-defaults.conf")}
1456+
.filter(_.isFile)
1457+
.map(_.getAbsolutePath)
1458+
.orNull
14691459
}
14701460

14711461
/** Return a nice string representation of the exception, including the stack trace. */

0 commit comments

Comments (0)