
Commit d190936

JoshRosen authored and Kostas Sakellis committed

Fix a few minor style / formatting nits.

Reset listener after each test; don't null the listener out at the end of main().
1 parent 8673fe1 commit d190936

6 files changed: +37 −32 lines changed

core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala

Lines changed: 6 additions & 2 deletions
@@ -39,8 +39,12 @@ private[spark] object CoarseGrainedClusterMessages {
   case class RegisterExecutorFailed(message: String) extends CoarseGrainedClusterMessage

   // Executors to driver
-  case class RegisterExecutor(executorId: String, hostPort: String, cores: Int,
-    logUrls : Map[String, String]) extends CoarseGrainedClusterMessage {
+  case class RegisterExecutor(
+      executorId: String,
+      hostPort: String,
+      cores: Int,
+      logUrls: Map[String, String])
+    extends CoarseGrainedClusterMessage {
     Utils.checkHostPort(hostPort, "Expected host port")
   }
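The new declaration follows the multi-line parameter style used elsewhere in Spark: when the parameter list no longer fits on one line, each parameter gets its own double-indented line and `extends` moves to its own line. A minimal, self-contained sketch of the same shape — the trait, example object, and sample values below are illustrative, not Spark's actual driver code:

```scala
// Illustrative message ADT; only the declaration style mirrors the diff.
sealed trait ClusterMessage

case class RegisterExecutor(
    executorId: String,
    hostPort: String,
    cores: Int,
    logUrls: Map[String, String])
  extends ClusterMessage {
  // Stand-in for Utils.checkHostPort in the real code.
  require(hostPort.contains(":"), "Expected host:port")
}

object RegisterExecutorExample extends App {
  def describe(msg: ClusterMessage): String = msg match {
    case RegisterExecutor(id, hostPort, cores, logUrls) =>
      s"registered $id at $hostPort with $cores cores and ${logUrls.size} log urls"
  }

  println(describe(RegisterExecutor("exec-1", "host:1234", 4,
    Map("stdout" -> "http://host:8042/stdout"))))
}
```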

core/src/main/scala/org/apache/spark/scheduler/cluster/ExecutorData.scala

Lines changed: 1 addition & 1 deletion
@@ -34,5 +34,5 @@ private[cluster] class ExecutorData(
    override val executorHost: String,
    var freeCores: Int,
    override val totalCores: Int,
-   override val logUrlMap : Map[String, String]
+   override val logUrlMap: Map[String, String]
 ) extends ExecutorInfo(executorHost, totalCores, logUrlMap)

core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala

Lines changed: 11 additions & 8 deletions
@@ -26,6 +26,7 @@ import org.apache.spark.ui.{ToolTips, UIUtils, WebUIPage}
 import org.apache.spark.util.Utils

 /** Summary information about an executor to display in the UI. */
+// Needs to be private[ui] because of a false positive MiMa failure.
 private[ui] case class ExecutorSummaryInfo(
     id: String,
     hostPort: String,
@@ -41,7 +42,7 @@ private[ui] case class ExecutorSummaryInfo(
     totalShuffleRead: Long,
     totalShuffleWrite: Long,
     maxMemory: Long,
-    executorLogs : Map[String, String])
+    executorLogs: Map[String, String])

 private[ui] class ExecutorsPage(
     parent: ExecutorsTab,
@@ -144,13 +145,15 @@ private[ui] class ExecutorsPage(
         {
           if (logsExist) {
             <td>
-              {info.executorLogs.map { entry => {
-                <div>
-                  <a href={s"${entry._2}"}>
-                    {entry._1}
-                  </a>
-                </div>}
-              }}
+              {
+                info.executorLogs.map { case (logName, logUrl) =>
+                  <div>
+                    <a href={logUrl}>
+                      {logName}
+                    </a>
+                  </div>
+                }
+              }
             </td>
           }
         }
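The rewritten XML block swaps positional tuple accessors (`entry._1`, `entry._2`) for a destructuring `case (logName, logUrl)` pattern, which names the key and value directly. A standalone sketch of the idiom, assuming a plain `Map[String, String]` of log names to URLs (the data is made up):

```scala
object MapDestructureExample extends App {
  // Illustrative data: log names mapped to invented URLs.
  val executorLogs = Map(
    "stdout" -> "http://host:8042/logs/stdout",
    "stderr" -> "http://host:8042/logs/stderr")

  // Before: positional tuple accessors obscure what each field is.
  val before = executorLogs.map { entry => s"${entry._1} -> ${entry._2}" }.toList

  // After: a destructuring pattern names the key and value explicitly.
  val after = executorLogs.map { case (logName, logUrl) => s"$logName -> $logUrl" }.toList

  assert(before == after)
  after.foreach(println)
}
```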

core/src/main/scala/org/apache/spark/ui/exec/ExecutorsTab.scala

Lines changed: 1 addition & 3 deletions
@@ -17,8 +17,6 @@

 package org.apache.spark.ui.exec

-import org.apache.spark.{SparkEnv, Logging, SparkException, TaskState}
-
 import scala.collection.mutable.HashMap

 import org.apache.spark.ExceptionFailure
@@ -57,7 +55,7 @@ class ExecutorsListener(storageStatusListener: StorageStatusListener) extends Sp

   def storageStatusList = storageStatusListener.storageStatusList

-  override def onExecutorAdded(executorAdded : SparkListenerExecutorAdded) = synchronized {
+  override def onExecutorAdded(executorAdded: SparkListenerExecutorAdded) = synchronized {
     val eid = executorAdded.executorId
     executorToLogUrls(eid) = executorAdded.executorInfo.logUrlMap
   }
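For context, a hedged sketch of the bookkeeping this listener performs: each executor-added event records that executor's log-URL map under its id, with `synchronized` guarding the mutable map against concurrent readers (for example, a UI page rendering executor details). The event and class names below are simplified stand-ins, not Spark's real types:

```scala
import scala.collection.mutable.HashMap

// Simplified stand-in for SparkListenerExecutorAdded.
case class ExecutorAdded(executorId: String, logUrlMap: Map[String, String])

class LogUrlTracker {
  // executor id -> (log name -> log URL)
  val executorToLogUrls = HashMap[String, Map[String, String]]()

  // `synchronized` mirrors the diff: writes here may race with reads elsewhere.
  def onExecutorAdded(event: ExecutorAdded): Unit = synchronized {
    executorToLogUrls(event.executorId) = event.logUrlMap
  }
}

object LogUrlTrackerExample extends App {
  val tracker = new LogUrlTracker
  tracker.onExecutorAdded(ExecutorAdded("exec-1", Map("stdout" -> "http://host/stdout")))
  println(tracker.executorToLogUrls)
}
```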

core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala

Lines changed: 9 additions & 9 deletions
@@ -17,15 +17,15 @@

 package org.apache.spark.deploy

+import scala.collection.mutable
+
+import org.scalatest.{BeforeAndAfter, FunSuite}
+
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.scheduler.{SparkListenerExecutorAdded, SparkListener}
 import org.apache.spark.{SparkContext, LocalSparkContext}
-import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite}

-import scala.collection.mutable
-
-class LogUrlsStandaloneSuite extends FunSuite with LocalSparkContext
-  with BeforeAndAfter with BeforeAndAfterAll {
+class LogUrlsStandaloneSuite extends FunSuite with LocalSparkContext with BeforeAndAfter {

   /** Length of time to wait while draining listener events. */
   val WAIT_TIMEOUT_MILLIS = 10000
@@ -44,15 +44,15 @@ class LogUrlsStandaloneSuite extends FunSuite with LocalSparkContext
     rdd2.count()

     assert(sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS))
-    listener.addedExecutorInfos.foreach(e => {
-      assert(e._2.logUrlMap.nonEmpty)
-    })
+    listener.addedExecutorInfos.values.foreach { info =>
+      assert(info.logUrlMap.nonEmpty)
+    }
   }

   private class SaveExecutorInfo extends SparkListener {
     val addedExecutorInfos = mutable.Map[String, ExecutorInfo]()

-    override def onExecutorAdded(executor : SparkListenerExecutorAdded) {
+    override def onExecutorAdded(executor: SparkListenerExecutorAdded) {
       addedExecutorInfos(executor.executorId) = executor.executorInfo
     }
   }
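The assertion loop now iterates `values` directly rather than destructuring each entry only to discard the key. A small comparison under the same assumption of an executor-id-to-info map (the data here is invented):

```scala
object ValuesIterationExample extends App {
  // Invented stand-in for the suite's executorId -> ExecutorInfo map,
  // with the inner map playing the role of logUrlMap.
  val addedExecutorInfos = Map(
    "exec-1" -> Map("stdout" -> "http://a/stdout"),
    "exec-2" -> Map("stderr" -> "http://b/stderr"))

  // Before: iterate entries and reach into each tuple with ._2.
  addedExecutorInfos.foreach(e => assert(e._2.nonEmpty))

  // After: when only the values matter, iterate them directly.
  addedExecutorInfos.values.foreach { info => assert(info.nonEmpty) }

  println("both assertions passed")
}
```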

yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala

Lines changed: 9 additions & 9 deletions
@@ -29,10 +29,10 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.hadoop.yarn.server.MiniYARNCluster
 import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}

+import org.apache.spark.{Logging, SparkConf, SparkContext, SparkException}
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded}
 import org.apache.spark.util.Utils
-import org.apache.spark.{Logging, SparkConf, SparkContext, SparkException}

 class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers with Logging {

@@ -146,9 +146,9 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers wit
     checkResult(result)

     // verify log urls are present
-    YarnClusterDriver.listener.addedExecutorInfos.foreach(e => {
-      assert(e._2.logUrlMap.nonEmpty)
-    })
+    YarnClusterDriver.listener.addedExecutorInfos.values.foreach { info =>
+      assert(info.logUrlMap.nonEmpty)
+    }
   }

   test("run Spark in yarn-cluster mode") {
@@ -164,9 +164,9 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers wit
     checkResult(result)

     // verify log urls are present.
-    YarnClusterDriver.listener.addedExecutorInfos.foreach { e => {
-      assert(e._2.logUrlMap.nonEmpty)
-    }}
+    YarnClusterDriver.listener.addedExecutorInfos.values.foreach { info =>
+      assert(info.logUrlMap.nonEmpty)
+    }
   }

   test("run Spark in yarn-cluster mode unsuccessfully") {
@@ -225,7 +225,7 @@ private class SaveExecutorInfo extends SparkListener {
 private object YarnClusterDriver extends Logging with Matchers {

   val WAIT_TIMEOUT_MILLIS = 10000
-  val listener = new SaveExecutorInfo
+  var listener: SaveExecutorInfo = null

   def main(args: Array[String]) = {
     if (args.length != 2) {
@@ -238,7 +238,7 @@ private object YarnClusterDriver extends Logging with Matchers {
       System.exit(1)
     }

-
+    listener = new SaveExecutorInfo
     val sc = new SparkContext(new SparkConf().setMaster(args(0))
       .setAppName("yarn \"test app\" 'with quotes' and \\back\\slashes and $dollarSigns"))
     sc.addSparkListener(listener)
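The driver object now holds the listener in a `var` and creates a fresh `SaveExecutorInfo` at the top of `main()`, so repeated runs within one JVM (as in this suite) each start from empty state instead of accumulating entries in a shared `val`. A stripped-down sketch of that pattern — everything except the `var`-plus-reassignment shape is illustrative:

```scala
// Stripped-down stand-in for the suite's SaveExecutorInfo listener.
class SaveExecutorInfo {
  val addedExecutorInfos =
    scala.collection.mutable.Map[String, Map[String, String]]()
}

object DriverSketch {
  // A var, reassigned at the start of each run, so state cannot leak
  // between runs. With a shared `val`, entries recorded by a previous
  // run in the same JVM would still be present.
  var listener: SaveExecutorInfo = null

  def main(args: Array[String]): Unit = {
    listener = new SaveExecutorInfo
    // ... build the context, register `listener`, run the job ...
    listener.addedExecutorInfos("exec-1") = Map("stdout" -> "http://host/stdout")
    println(s"captured ${listener.addedExecutorInfos.size} executor(s)")
  }
}
```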

0 commit comments