Skip to content

Commit af430f1

Browse files
committed
Merge branch 'master' of git://git.apache.org/spark into stage-page-timeline
2 parents e694b8e + 1686032 commit af430f1

40 files changed

+1066
-268
lines changed

bin/spark-shell2.cmd

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -19,7 +19,7 @@ rem
1919

2020
set SPARK_HOME=%~dp0..
2121

22-
echo "%*" | findstr " --help -h" >nul
22+
echo "%*" | findstr " \<--help\> \<-h\>" >nul
2323
if %ERRORLEVEL% equ 0 (
2424
call :usage
2525
exit /b 0

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 6 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -217,6 +217,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
217217
private var _heartbeatReceiver: RpcEndpointRef = _
218218
@volatile private var _dagScheduler: DAGScheduler = _
219219
private var _applicationId: String = _
220+
private var _applicationAttemptId: Option[String] = None
220221
private var _eventLogger: Option[EventLoggingListener] = None
221222
private var _executorAllocationManager: Option[ExecutorAllocationManager] = None
222223
private var _cleaner: Option[ContextCleaner] = None
@@ -315,6 +316,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
315316
}
316317

317318
def applicationId: String = _applicationId
319+
def applicationAttemptId: Option[String] = _applicationAttemptId
318320

319321
def metricsSystem: MetricsSystem = if (_env != null) _env.metricsSystem else null
320322

@@ -472,6 +474,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
472474
_taskScheduler.start()
473475

474476
_applicationId = _taskScheduler.applicationId()
477+
_applicationAttemptId = taskScheduler.applicationAttemptId()
475478
_conf.set("spark.app.id", _applicationId)
476479
_env.blockManager.initialize(_applicationId)
477480

@@ -484,7 +487,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
484487
_eventLogger =
485488
if (isEventLogEnabled) {
486489
val logger =
487-
new EventLoggingListener(_applicationId, _eventLogDir.get, _conf, _hadoopConfiguration)
490+
new EventLoggingListener(_applicationId, _applicationAttemptId, _eventLogDir.get,
491+
_conf, _hadoopConfiguration)
488492
logger.start()
489493
listenerBus.addListener(logger)
490494
Some(logger)
@@ -1868,7 +1872,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
18681872
// Note: this code assumes that the task scheduler has been initialized and has contacted
18691873
// the cluster manager to get an application ID (in case the cluster manager provides one).
18701874
listenerBus.post(SparkListenerApplicationStart(appName, Some(applicationId),
1871-
startTime, sparkUser))
1875+
startTime, sparkUser, applicationAttemptId))
18721876
}
18731877

18741878
/** Post the application end event */

core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala

Lines changed: 9 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -19,15 +19,19 @@ package org.apache.spark.deploy.history
1919

2020
import org.apache.spark.ui.SparkUI
2121

22-
private[history] case class ApplicationHistoryInfo(
23-
id: String,
24-
name: String,
22+
private[history] case class ApplicationAttemptInfo(
23+
attemptId: Option[String],
2524
startTime: Long,
2625
endTime: Long,
2726
lastUpdated: Long,
2827
sparkUser: String,
2928
completed: Boolean = false)
3029

30+
private[history] case class ApplicationHistoryInfo(
31+
id: String,
32+
name: String,
33+
attempts: List[ApplicationAttemptInfo])
34+
3135
private[history] abstract class ApplicationHistoryProvider {
3236

3337
/**
@@ -41,9 +45,10 @@ private[history] abstract class ApplicationHistoryProvider {
4145
* Returns the Spark UI for a specific application.
4246
*
4347
* @param appId The application ID.
48+
* @param attemptId The application attempt ID (or None if there is no attempt ID).
4449
* @return The application's UI, or None if application is not found.
4550
*/
46-
def getAppUI(appId: String): Option[SparkUI]
51+
def getAppUI(appId: String, attemptId: Option[String]): Option[SparkUI]
4752

4853
/**
4954
* Called when the server is shutting down.

0 commit comments

Comments (0)