@@ -1417,7 +1417,7 @@ private[spark] object Utils extends Logging {
1417
1417
* already set. Return the path of the properties file used.
1418
1418
*/
1419
1419
def loadDefaultSparkProperties (conf : SparkConf , filePath : String = null ): String = {
1420
- val path = Option (filePath).getOrElse(getDefaultPropertiesFile)
1420
+ val path = Option (filePath).getOrElse(getDefaultPropertiesFile() )
1421
1421
Option (path).foreach { confFile =>
1422
1422
getPropertiesFromFile(confFile).filter { case (k, v) =>
1423
1423
k.startsWith(" spark." )
@@ -1449,23 +1449,13 @@ private[spark] object Utils extends Logging {
1449
1449
}
1450
1450
1451
1451
/** Return the path of the default Spark properties file. */
def getDefaultPropertiesFile(env: Map[String, String] = sys.env): String = {
  // Resolve the configuration directory: an explicit SPARK_CONF_DIR wins;
  // otherwise fall back to <SPARK_HOME>/conf. Note there is deliberately no
  // second fallback — if SPARK_CONF_DIR is set but holds no
  // spark-defaults.conf, SPARK_HOME is not consulted.
  val confDir = env.get("SPARK_CONF_DIR").orElse {
    env.get("SPARK_HOME").map(home => Seq(home, "conf").mkString(File.separator))
  }
  // The candidate file only counts if it exists as a regular file; otherwise
  // return null (callers wrap the result in Option(...), per this file's
  // convention of null-returning path lookups).
  val candidate = confDir.map(dir => new File(dir, "spark-defaults.conf"))
  candidate.filter(_.isFile).map(_.getAbsolutePath).orNull
}
1470
1460
1471
1461
/** Return a nice string representation of the exception, including the stack trace. */
0 commit comments