Skip to content

Commit 89f10e4

Browse files
committed
Merge remote-tracking branch 'upstream/master' into elastic-net-doc
2 parents 9262a72 + e7b6177 commit 89f10e4

File tree

39 files changed

+414
-242
lines changed

39 files changed

+414
-242
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -428,6 +428,8 @@ object SparkSubmit {
428428
OptionAssigner(args.executorCores, YARN, CLIENT, sysProp = "spark.executor.cores"),
429429
OptionAssigner(args.files, YARN, CLIENT, sysProp = "spark.yarn.dist.files"),
430430
OptionAssigner(args.archives, YARN, CLIENT, sysProp = "spark.yarn.dist.archives"),
431+
OptionAssigner(args.principal, YARN, CLIENT, sysProp = "spark.yarn.principal"),
432+
OptionAssigner(args.keytab, YARN, CLIENT, sysProp = "spark.yarn.keytab"),
431433

432434
// Yarn cluster only
433435
OptionAssigner(args.name, YARN, CLUSTER, clOption = "--name"),
@@ -440,10 +442,8 @@ object SparkSubmit {
440442
OptionAssigner(args.files, YARN, CLUSTER, clOption = "--files"),
441443
OptionAssigner(args.archives, YARN, CLUSTER, clOption = "--archives"),
442444
OptionAssigner(args.jars, YARN, CLUSTER, clOption = "--addJars"),
443-
444-
// Yarn client or cluster
445-
OptionAssigner(args.principal, YARN, ALL_DEPLOY_MODES, clOption = "--principal"),
446-
OptionAssigner(args.keytab, YARN, ALL_DEPLOY_MODES, clOption = "--keytab"),
445+
OptionAssigner(args.principal, YARN, CLUSTER, clOption = "--principal"),
446+
OptionAssigner(args.keytab, YARN, CLUSTER, clOption = "--keytab"),
447447

448448
// Other options
449449
OptionAssigner(args.executorCores, STANDALONE, ALL_DEPLOY_MODES,

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -169,6 +169,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
169169
deployMode = Option(deployMode).orElse(env.get("DEPLOY_MODE")).orNull
170170
numExecutors = Option(numExecutors)
171171
.getOrElse(sparkProperties.get("spark.executor.instances").orNull)
172+
keytab = Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
173+
principal = Option(principal).orElse(sparkProperties.get("spark.yarn.principal")).orNull
172174

173175
// Try to set main class from JAR if no --class argument is given
174176
if (mainClass == null && !isPython && !isR && primaryResource != null) {

core/src/main/scala/org/apache/spark/status/api/v1/SimpleDateParam.scala

Lines changed: 21 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -16,40 +16,33 @@
1616
*/
1717
package org.apache.spark.status.api.v1
1818

19-
import java.text.SimpleDateFormat
19+
import java.text.{ParseException, SimpleDateFormat}
2020
import java.util.TimeZone
2121
import javax.ws.rs.WebApplicationException
2222
import javax.ws.rs.core.Response
2323
import javax.ws.rs.core.Response.Status
2424

25-
import scala.util.Try
26-
2725
private[v1] class SimpleDateParam(val originalValue: String) {
28-
val timestamp: Long = {
29-
SimpleDateParam.formats.collectFirst {
30-
case fmt if Try(fmt.parse(originalValue)).isSuccess =>
31-
fmt.parse(originalValue).getTime()
32-
}.getOrElse(
33-
throw new WebApplicationException(
34-
Response
35-
.status(Status.BAD_REQUEST)
36-
.entity("Couldn't parse date: " + originalValue)
37-
.build()
38-
)
39-
)
40-
}
41-
}
4226

43-
private[v1] object SimpleDateParam {
44-
45-
val formats: Seq[SimpleDateFormat] = {
46-
47-
val gmtDay = new SimpleDateFormat("yyyy-MM-dd")
48-
gmtDay.setTimeZone(TimeZone.getTimeZone("GMT"))
49-
50-
Seq(
51-
new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSz"),
52-
gmtDay
53-
)
27+
val timestamp: Long = {
28+
val format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSz")
29+
try {
30+
format.parse(originalValue).getTime()
31+
} catch {
32+
case _: ParseException =>
33+
val gmtDay = new SimpleDateFormat("yyyy-MM-dd")
34+
gmtDay.setTimeZone(TimeZone.getTimeZone("GMT"))
35+
try {
36+
gmtDay.parse(originalValue).getTime()
37+
} catch {
38+
case _: ParseException =>
39+
throw new WebApplicationException(
40+
Response
41+
.status(Status.BAD_REQUEST)
42+
.entity("Couldn't parse date: " + originalValue)
43+
.build()
44+
)
45+
}
46+
}
5447
}
5548
}

core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -139,8 +139,8 @@ private[spark] class DiskBlockManager(blockManager: BlockManager, conf: SparkCon
139139
}
140140

141141
private def addShutdownHook(): AnyRef = {
142-
Utils.addShutdownHook { () =>
143-
logDebug("Shutdown hook called")
142+
Utils.addShutdownHook(Utils.TEMP_DIR_SHUTDOWN_PRIORITY + 1) { () =>
143+
logInfo("Shutdown hook called")
144144
DiskBlockManager.this.doStop()
145145
}
146146
}

core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -527,7 +527,7 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
527527
minLaunchTime = launchTime.min(minLaunchTime)
528528
maxFinishTime = finishTime.max(maxFinishTime)
529529

530-
def toProportion(time: Long) = (time.toDouble / totalExecutionTime * 100).toLong
530+
def toProportion(time: Long) = time.toDouble / totalExecutionTime * 100
531531

532532
val metricsOpt = taskUIData.taskMetrics
533533
val shuffleReadTime =

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,13 @@ private[spark] object Utils extends Logging {
7373
*/
7474
val SPARK_CONTEXT_SHUTDOWN_PRIORITY = 50
7575

76+
/**
77+
* The shutdown priority of the temp directory must be lower than the SparkContext shutdown
78+
* priority. Otherwise cleaning the temp directories while Spark jobs are running can
79+
* throw undesirable errors at the time of shutdown.
80+
*/
81+
val TEMP_DIR_SHUTDOWN_PRIORITY = 25
82+
7683
private val MAX_DIR_CREATION_ATTEMPTS: Int = 10
7784
@volatile private var localRootDirs: Array[String] = null
7885

@@ -189,10 +196,11 @@ private[spark] object Utils extends Logging {
189196
private val shutdownDeleteTachyonPaths = new scala.collection.mutable.HashSet[String]()
190197

191198
// Add a shutdown hook to delete the temp dirs when the JVM exits
192-
addShutdownHook { () =>
193-
logDebug("Shutdown hook called")
199+
addShutdownHook(TEMP_DIR_SHUTDOWN_PRIORITY) { () =>
200+
logInfo("Shutdown hook called")
194201
shutdownDeletePaths.foreach { dirPath =>
195202
try {
203+
logInfo("Deleting directory " + dirPath)
196204
Utils.deleteRecursively(new File(dirPath))
197205
} catch {
198206
case e: Exception => logError(s"Exception while deleting Spark temp dir: $dirPath", e)

core/src/test/scala/org/apache/spark/SSLSampleConfigs.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ object SSLSampleConfigs {
3434
conf.set("spark.ssl.trustStore", trustStorePath)
3535
conf.set("spark.ssl.trustStorePassword", "password")
3636
conf.set("spark.ssl.enabledAlgorithms",
37-
"TLS_RSA_WITH_AES_128_CBC_SHA, SSL_RSA_WITH_DES_CBC_SHA")
37+
"SSL_RSA_WITH_RC4_128_SHA, SSL_RSA_WITH_DES_CBC_SHA")
3838
conf.set("spark.ssl.protocol", "TLSv1")
3939
conf
4040
}
@@ -48,7 +48,7 @@ object SSLSampleConfigs {
4848
conf.set("spark.ssl.trustStore", trustStorePath)
4949
conf.set("spark.ssl.trustStorePassword", "password")
5050
conf.set("spark.ssl.enabledAlgorithms",
51-
"TLS_RSA_WITH_AES_128_CBC_SHA, SSL_RSA_WITH_DES_CBC_SHA")
51+
"SSL_RSA_WITH_RC4_128_SHA, SSL_RSA_WITH_DES_CBC_SHA")
5252
conf.set("spark.ssl.protocol", "TLSv1")
5353
conf
5454
}

core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -147,7 +147,7 @@ class SecurityManagerSuite extends FunSuite {
147147
assert(securityManager.fileServerSSLOptions.keyPassword === Some("password"))
148148
assert(securityManager.fileServerSSLOptions.protocol === Some("TLSv1"))
149149
assert(securityManager.fileServerSSLOptions.enabledAlgorithms ===
150-
Set("TLS_RSA_WITH_AES_128_CBC_SHA", "SSL_RSA_WITH_DES_CBC_SHA"))
150+
Set("SSL_RSA_WITH_RC4_128_SHA", "SSL_RSA_WITH_DES_CBC_SHA"))
151151

152152
assert(securityManager.akkaSSLOptions.trustStore.isDefined === true)
153153
assert(securityManager.akkaSSLOptions.trustStore.get.getName === "truststore")
@@ -158,7 +158,7 @@ class SecurityManagerSuite extends FunSuite {
158158
assert(securityManager.akkaSSLOptions.keyPassword === Some("password"))
159159
assert(securityManager.akkaSSLOptions.protocol === Some("TLSv1"))
160160
assert(securityManager.akkaSSLOptions.enabledAlgorithms ===
161-
Set("TLS_RSA_WITH_AES_128_CBC_SHA", "SSL_RSA_WITH_DES_CBC_SHA"))
161+
Set("SSL_RSA_WITH_RC4_128_SHA", "SSL_RSA_WITH_DES_CBC_SHA"))
162162
}
163163

164164
test("ssl off setup") {

core/src/test/scala/org/apache/spark/status/api/v1/SimpleDateParamSuite.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@
1616
*/
1717
package org.apache.spark.status.api.v1
1818

19+
import javax.ws.rs.WebApplicationException
20+
1921
import org.scalatest.{Matchers, FunSuite}
2022

2123
class SimpleDateParamSuite extends FunSuite with Matchers {
@@ -24,6 +26,9 @@ class SimpleDateParamSuite extends FunSuite with Matchers {
2426
new SimpleDateParam("2015-02-20T23:21:17.190GMT").timestamp should be (1424474477190L)
2527
new SimpleDateParam("2015-02-20T17:21:17.190EST").timestamp should be (1424470877190L)
2628
new SimpleDateParam("2015-02-20").timestamp should be (1424390400000L) // GMT
29+
intercept[WebApplicationException] {
30+
new SimpleDateParam("invalid date")
31+
}
2732
}
2833

2934
}

docs/running-on-yarn.md

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -242,6 +242,22 @@ Most of the configs are the same for Spark on YARN as for other deployment modes
242242
running against earlier versions, this property will be ignored.
243243
</td>
244244
</tr>
245+
<tr>
246+
<td><code>spark.yarn.keytab</code></td>
247+
<td>(none)</td>
248+
<td>
249+
The full path to the file that contains the keytab for the principal specified above.
250+
This keytab will be copied to the node running the Application Master via the Secure Distributed Cache,
251+
for periodically renewing the login tickets and delegation tokens.
252+
</td>
253+
</tr>
254+
<tr>
255+
<td><code>spark.yarn.principal</code></td>
256+
<td>(none)</td>
257+
<td>
258+
Principal to be used to log in to the KDC while running on secure HDFS.
259+
</td>
260+
</tr>
245261
</table>
246262

247263
# Launching Spark on YARN

0 commit comments

Comments
 (0)