Commit 2c93209

Authored and committed by Marcelo Vanzin
[SPARK-6046] [core] Reorganize deprecated config support in SparkConf.
This change follows the agreed-upon approach for handling deprecated configs in SparkConf: all values (old and new) are kept in the conf object, and newer names take precedence over older ones when a value is retrieved. Warnings are logged when deprecated config options are set, which generally happens on the driver node (where the logs are most visible).
1 parent 51b306b · commit 2c93209

2 files changed: +107 -80 lines changed
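
For a quick sense of the new behavior, here is a small usage sketch. It mirrors the "deprecated configs" test added to SparkConfSuite below, and the config names are the real ones touched by this commit:

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
    // Setting a deprecated key logs a warning at set() time...
    conf.set("spark.history.updateInterval", "1")
    // ...but the value is still visible through the new key via the alternate-key lookup.
    conf.get("spark.history.fs.update.interval.seconds")   // "1"
    // Once the new key is set explicitly, it takes precedence over any deprecated alternates.
    conf.set("spark.history.fs.update.interval.seconds", "3")
    conf.get("spark.history.fs.update.interval.seconds")   // "3"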

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 91 additions & 80 deletions
@@ -68,6 +68,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     if (value == null) {
       throw new NullPointerException("null value for " + key)
     }
+    logDeprecationWarning(key)
     settings.put(key, value)
     this
   }
@@ -134,13 +135,16 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   /** Set multiple parameters together */
   def setAll(settings: Traversable[(String, String)]): SparkConf = {
+    settings.foreach { case (k, v) => logDeprecationWarning(k) }
     this.settings.putAll(settings.toMap.asJava)
     this
   }
 
   /** Set a parameter if it isn't already configured */
   def setIfMissing(key: String, value: String): SparkConf = {
-    settings.putIfAbsent(key, value)
+    if (settings.putIfAbsent(key, value) == null) {
+      logDeprecationWarning(key)
+    }
     this
   }
 
@@ -174,45 +178,45 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     getOption(key).getOrElse(defaultValue)
   }
 
-  /**
-   * Get a time parameter as seconds; throws a NoSuchElementException if it's not set. If no
+  /**
+   * Get a time parameter as seconds; throws a NoSuchElementException if it's not set. If no
    * suffix is provided then seconds are assumed.
    * @throws NoSuchElementException
    */
   def getTimeAsSeconds(key: String): Long = {
     Utils.timeStringAsSeconds(get(key))
   }
 
-  /**
-   * Get a time parameter as seconds, falling back to a default if not set. If no
+  /**
+   * Get a time parameter as seconds, falling back to a default if not set. If no
    * suffix is provided then seconds are assumed.
-   *
+   *
   */
   def getTimeAsSeconds(key: String, defaultValue: String): Long = {
     Utils.timeStringAsSeconds(get(key, defaultValue))
   }
 
-  /**
-   * Get a time parameter as milliseconds; throws a NoSuchElementException if it's not set. If no
-   * suffix is provided then milliseconds are assumed.
+  /**
+   * Get a time parameter as milliseconds; throws a NoSuchElementException if it's not set. If no
+   * suffix is provided then milliseconds are assumed.
    * @throws NoSuchElementException
    */
   def getTimeAsMs(key: String): Long = {
     Utils.timeStringAsMs(get(key))
   }
 
-  /**
-   * Get a time parameter as milliseconds, falling back to a default if not set. If no
-   * suffix is provided then milliseconds are assumed.
+  /**
+   * Get a time parameter as milliseconds, falling back to a default if not set. If no
+   * suffix is provided then milliseconds are assumed.
    */
   def getTimeAsMs(key: String, defaultValue: String): Long = {
     Utils.timeStringAsMs(get(key, defaultValue))
   }
-
+
 
   /** Get a parameter as an Option */
   def getOption(key: String): Option[String] = {
-    Option(settings.get(key))
+    Option(settings.get(key)).orElse(getDeprecatedConfig(key, this))
   }
 
   /** Get all parameters as a list of pairs */
@@ -379,13 +383,6 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
         }
       }
     }
-
-    // Warn against the use of deprecated configs
-    deprecatedConfigs.values.foreach { dc =>
-      if (contains(dc.oldName)) {
-        dc.warn()
-      }
-    }
   }
 
   /**
@@ -400,19 +397,39 @@
 
 private[spark] object SparkConf extends Logging {
 
-  private val deprecatedConfigs: Map[String, DeprecatedConfig] = {
-    val configs = Seq(
-      DeprecatedConfig("spark.files.userClassPathFirst", "spark.executor.userClassPathFirst",
-        "1.3"),
-      DeprecatedConfig("spark.yarn.user.classpath.first", null, "1.3",
-        "Use spark.{driver,executor}.userClassPathFirst instead."),
-      DeprecatedConfig("spark.history.fs.updateInterval",
-        "spark.history.fs.update.interval.seconds",
-        "1.3", "Use spark.history.fs.update.interval.seconds instead"),
-      DeprecatedConfig("spark.history.updateInterval",
-        "spark.history.fs.update.interval.seconds",
-        "1.3", "Use spark.history.fs.update.interval.seconds instead"))
-    configs.map { x => (x.oldName, x) }.toMap
+  /**
+   * Maps deprecated config keys to information about the deprecation.
+   *
+   * The extra information is logged as a warning when the config is present in the user's
+   * configuration.
+   */
+  private val deprecatedConfigs = Map[String, DeprecatedConfig](
+    "spark.yarn.user.classpath.first" ->
+      DeprecatedConfig("1.3", "Use spark.{driver,executor}.userClassPathFirst instead.")
+  )
+
+  /**
+   * Maps a current config key to alternate keys that were used in previous version of Spark.
+   *
+   * The alternates are used in the order defined in this map. If deprecated configs are
+   * present in the user's configuration, a warning is logged.
+   */
+  private val configsWithAlternatives = Map[String, Seq[AlternateConfig]](
+    "spark.executor.userClassPathFirst" -> Seq(
+      AlternateConfig("spark.files.userClassPathFirst", "1.3")),
+    "spark.history.fs.update.interval.seconds" -> Seq(
+      AlternateConfig("spark.history.fs.updateInterval", "1.3"),
+      AlternateConfig("spark.history.updateInterval", "1.3"))
+  )
+
+  /**
+   * A view of `configsWithAlternatives` that makes it more efficient to look up deprecated
+   * config keys.
+   */
+  private val allAlternatives: Map[String, (String, AlternateConfig)] = {
+    configsWithAlternatives.keys.flatMap { key =>
+      configsWithAlternatives(key).map { cfg => (cfg.key -> (key -> cfg)) }
+    }.toMap
   }
 
   /**
@@ -443,61 +460,55 @@ private[spark] object SparkConf extends Logging {
   }
 
   /**
-   * Translate the configuration key if it is deprecated and has a replacement, otherwise just
-   * returns the provided key.
-   *
-   * @param userKey Configuration key from the user / caller.
-   * @param warn Whether to print a warning if the key is deprecated. Warnings will be printed
-   *             only once for each key.
+   * Looks for available deprecated keys for the given config option, and return the first
+   * value available.
    */
-  private def translateConfKey(userKey: String, warn: Boolean = false): String = {
-    deprecatedConfigs.get(userKey)
-      .map { deprecatedKey =>
-        if (warn) {
-          deprecatedKey.warn()
-        }
-        deprecatedKey.newName.getOrElse(userKey)
-      }.getOrElse(userKey)
+  def getDeprecatedConfig(key: String, conf: SparkConf): Option[String] = {
+    configsWithAlternatives.get(key).flatMap(
+      _.collectFirst {
+        case x if conf.contains(x.key) =>
+          val value = conf.get(x.key)
+          x.translation.map(_(value)).getOrElse(value)
+      })
+  }
+
+  /**
+   * Logs a warning message if the given config key is deprecated.
+   */
+  def logDeprecationWarning(key: String): Unit = {
+    deprecatedConfigs.get(key).foreach { cfg =>
+      logWarning(
+        s"The configuration option '$key' has been deprecated as of Spark ${cfg.version} and " +
+        s"may be removed in the future. ${cfg.deprecationMessage}")
+    }
+
+    allAlternatives.get(key).foreach { case (newKey, cfg) =>
+      logWarning(
+        s"The configuration option '$key' has been replaced as of Spark ${cfg.version} " +
+        s"and may be removed in the future. Please use the new config '$newKey' instead.")
+    }
   }
 
   /**
-   * Holds information about keys that have been deprecated or renamed.
+   * Holds information about keys that have been deprecated and do not have a replacement.
    *
-   * @param oldName Old configuration key.
-   * @param newName New configuration key, or `null` if key has no replacement, in which case the
-   *                deprecated key will be used (but the warning message will still be printed).
   * @param version Version of Spark where key was deprecated.
-   * @param deprecationMessage Message to include in the deprecation warning; mandatory when
-   *                           `newName` is not provided.
+   * @param deprecationMessage Message to include in the deprecation warning.
   */
   private case class DeprecatedConfig(
-      oldName: String,
-      _newName: String,
      version: String,
-      deprecationMessage: String = null) {
-
-    private val warned = new AtomicBoolean(false)
-    val newName = Option(_newName)
-
-    if (newName == null && (deprecationMessage == null || deprecationMessage.isEmpty())) {
-      throw new IllegalArgumentException("Need new config name or deprecation message.")
-    }
+      deprecationMessage: String = null)
 
-    def warn(): Unit = {
-      if (warned.compareAndSet(false, true)) {
-        if (newName != null) {
-          val message = Option(deprecationMessage).getOrElse(
-            s"Please use the alternative '$newName' instead.")
-          logWarning(
-            s"The configuration option '$oldName' has been replaced as of Spark $version and " +
-            s"may be removed in the future. $message")
-        } else {
-          logWarning(
-            s"The configuration option '$oldName' has been deprecated as of Spark $version and " +
-            s"may be removed in the future. $deprecationMessage")
-        }
-      }
-    }
+  /**
+   * Information about an alternate configuration key that has been deprecated.
+   *
+   * @param key The deprecated config key.
+   * @param version The Spark version in which the key was deprecated.
+   * @param translation A translation function for converting old config values into new ones.
+   */
+  private case class AlternateConfig(
+      key: String,
+      version: String,
+      translation: Option[String => String] = None)
 
-  }
 }
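
None of the entries added in this commit use the translation hook of AlternateConfig. A hypothetical entry (the "spark.some.timeout.*" names are invented for illustration, not real Spark configs) could use it to rewrite an old value expressed in minutes into seconds for the new key:

    // Hypothetical example only: an alternate key whose old values were in minutes.
    "spark.some.timeout.seconds" -> Seq(
      AlternateConfig("spark.some.timeout.minutes", "1.3",
        translation = Some(m => (m.toLong * 60).toString)))

With such an entry, getDeprecatedConfig would return the translated value (for example "120" for an old setting of "2") when the new key is read.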

core/src/test/scala/org/apache/spark/SparkConfSuite.scala

Lines changed: 16 additions & 0 deletions
@@ -197,6 +197,22 @@ class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemPro
     serializer.newInstance().serialize(new StringBuffer())
   }
 
+  test("deprecated configs") {
+    val conf = new SparkConf()
+    val newName = "spark.history.fs.update.interval.seconds"
+
+    assert(!conf.contains(newName))
+
+    conf.set("spark.history.updateInterval", "1")
+    assert(conf.get(newName) === "1")
+
+    conf.set("spark.history.fs.updateInterval", "2")
+    assert(conf.get(newName) === "2")
+
+    conf.set(newName, "3")
+    assert(conf.get(newName) === "3")
+  }
+
 }
 
 class Class1 {}
