-
Notifications
You must be signed in to change notification settings - Fork 28.7k
SPARK-1291: Link the spark UI to RM ui in yarn-client mode #1112
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
1fe7710
142ee29
3e9630b
6896586
1b92a07
210299c
1fbb925
6022bcd
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -31,10 +31,12 @@ import akka.actor.Terminated | |
import org.apache.spark.{Logging, SecurityManager, SparkConf} | ||
import org.apache.spark.util.{Utils, AkkaUtils} | ||
import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend | ||
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.AddWebUIFilter | ||
import org.apache.spark.scheduler.SplitInfo | ||
import org.apache.hadoop.yarn.client.api.AMRMClient | ||
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest | ||
import org.apache.spark.deploy.SparkHadoopUtil | ||
import org.apache.hadoop.yarn.webapp.util.WebAppUtils | ||
|
||
/** | ||
* An application master that allocates executors on behalf of a driver that is running outside | ||
|
@@ -82,6 +84,9 @@ class ExecutorLauncher(args: ApplicationMasterArguments, conf: Configuration, sp | |
case x: DisassociatedEvent => | ||
logInfo(s"Driver terminated or disconnected! Shutting down. $x") | ||
driverClosed = true | ||
case x: AddWebUIFilter => | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Can you make the same changes for yarn alpha mode also, please? |
||
logInfo(s"Add WebUI Filter. $x") | ||
driver ! x | ||
} | ||
} | ||
|
||
|
@@ -99,6 +104,7 @@ class ExecutorLauncher(args: ApplicationMasterArguments, conf: Configuration, sp | |
registerApplicationMaster() | ||
|
||
waitForSparkMaster() | ||
addAmIpFilter() | ||
|
||
// Allocate all containers | ||
allocateExecutors() | ||
|
@@ -142,9 +148,20 @@ class ExecutorLauncher(args: ApplicationMasterArguments, conf: Configuration, sp | |
} | ||
|
||
private def registerApplicationMaster(): RegisterApplicationMasterResponse = { | ||
logInfo("Registering the ApplicationMaster") | ||
// TODO: Find out client's Spark UI address and fill in here? | ||
amClient.registerApplicationMaster(Utils.localHostName(), 0, "") | ||
val appUIAddress = sparkConf.get("spark.driver.appUIAddress", "") | ||
logInfo(s"Registering the ApplicationMaster with appUIAddress: $appUIAddress") | ||
amClient.registerApplicationMaster(Utils.localHostName(), 0, appUIAddress) | ||
} | ||
|
||
// add the yarn amIpFilter that Yarn requires for properly securing the UI | ||
private def addAmIpFilter() { | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Can the functions addAmIpFilter() in ApplicationMaster.scala and ExecutorLauncher.scala be combined in, say, a new file YarnUtil.scala? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. There are differences in the code. Should not be combined. |
||
val proxy = WebAppUtils.getProxyHostAndPort(conf) | ||
val parts = proxy.split(":") | ||
val proxyBase = System.getenv(ApplicationConstants.APPLICATION_WEB_PROXY_BASE_ENV) | ||
val uriBase = "http://" + proxy + proxyBase | ||
val amFilter = "PROXY_HOST=" + parts(0) + "," + "PROXY_URI_BASE=" + uriBase | ||
val amFilterName = "org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter" | ||
actor ! AddWebUIFilter(amFilterName, amFilter, proxyBase) | ||
} | ||
|
||
private def waitForSparkMaster() { | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Can you use Option(System.getenv("APPLICATION_WEB_PROXY_BASE")).orElse(Option(System.getProperty("spark.ui.proxyBase"))).getOrElse("") for this instead of != null checks?
Also we should be using the SparkConf whenever possible and not system properties unless there is explicit reason
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Option(System.getenv("APPLICATION_WEB_PROXY_BASE"))
will create a new object. There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Can we set the "spark.ui.proxyBase" property in ApplicationMaster.scala's addAmIpFilter() so that
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
addAmIpFilter
is invoked in yarn Container. The code in Client.scala