Skip to content

Commit f1dd72a

Browse files
author
Marcelo Vanzin
committed
SHS-NG M4.5: Simplify API resource structure.
With the new UI store, the API resource classes have a lot less code, since there's no need for complicated translations between the UI types and the API types. So the code ended up with a bunch of files with a single method declared in them. This change re-structures the API code so that it uses fewer classes; mainly, most sub-resources were removed, and the code to deal with single-attempt and multi-attempt apps was simplified. The only behavior change was the addition of a method to return a single attempt's information; that was missing in the old API, so trying to retrieve "/v1/applications/appId/attemptId" would result in a 404 even if the attempt existed (and URIs under that one would return valid data). The streaming API resources also received the same treatment; the streaming backend is still not hooked up to the store, but once it is, the code in the remaining classes will be simplified even further.
1 parent e72e7b7 commit f1dd72a

24 files changed

+351
-1004
lines changed

common/kvstore/src/main/java/org/apache/spark/kvstore/LevelDB.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -177,6 +177,8 @@ public void delete(Class<?> type, Object naturalKey, boolean sync) throws Except
177177
batch.write(sync);
178178
}
179179
}
180+
} catch (NoSuchElementException nse) {
181+
// Ignore.
180182
} finally {
181183
batch.close();
182184
}

core/src/main/scala/org/apache/spark/status/api/v1/AllExecutorListResource.scala

Lines changed: 0 additions & 30 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/status/api/v1/AllJobsResource.scala

Lines changed: 0 additions & 35 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala

Lines changed: 0 additions & 31 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala

Lines changed: 0 additions & 37 deletions
This file was deleted.

core/src/main/scala/org/apache/spark/status/api/v1/ApiRootResource.scala

Lines changed: 17 additions & 186 deletions
Original file line numberDiff line numberDiff line change
@@ -44,189 +44,14 @@ import org.apache.spark.ui.SparkUI
4444
private[v1] class ApiRootResource extends ApiRequestContext {
4545

4646
@Path("applications")
47-
def getApplicationList(): ApplicationListResource = {
48-
new ApplicationListResource(uiRoot)
49-
}
47+
def applicationList(): Class[ApplicationListResource] = classOf[ApplicationListResource]
5048

5149
@Path("applications/{appId}")
52-
def getApplication(): OneApplicationResource = {
53-
new OneApplicationResource(uiRoot)
54-
}
55-
56-
@Path("applications/{appId}/{attemptId}/jobs")
57-
def getJobs(
58-
@PathParam("appId") appId: String,
59-
@PathParam("attemptId") attemptId: String): AllJobsResource = {
60-
withSparkUI(appId, Some(attemptId)) { ui =>
61-
new AllJobsResource(ui)
62-
}
63-
}
64-
65-
@Path("applications/{appId}/jobs")
66-
def getJobs(@PathParam("appId") appId: String): AllJobsResource = {
67-
withSparkUI(appId, None) { ui =>
68-
new AllJobsResource(ui)
69-
}
70-
}
71-
72-
@Path("applications/{appId}/jobs/{jobId: \\d+}")
73-
def getJob(@PathParam("appId") appId: String): OneJobResource = {
74-
withSparkUI(appId, None) { ui =>
75-
new OneJobResource(ui)
76-
}
77-
}
78-
79-
@Path("applications/{appId}/{attemptId}/jobs/{jobId: \\d+}")
80-
def getJob(
81-
@PathParam("appId") appId: String,
82-
@PathParam("attemptId") attemptId: String): OneJobResource = {
83-
withSparkUI(appId, Some(attemptId)) { ui =>
84-
new OneJobResource(ui)
85-
}
86-
}
87-
88-
@Path("applications/{appId}/executors")
89-
def getExecutors(@PathParam("appId") appId: String): ExecutorListResource = {
90-
withSparkUI(appId, None) { ui =>
91-
new ExecutorListResource(ui)
92-
}
93-
}
94-
95-
@Path("applications/{appId}/allexecutors")
96-
def getAllExecutors(@PathParam("appId") appId: String): AllExecutorListResource = {
97-
withSparkUI(appId, None) { ui =>
98-
new AllExecutorListResource(ui)
99-
}
100-
}
101-
102-
@Path("applications/{appId}/{attemptId}/executors")
103-
def getExecutors(
104-
@PathParam("appId") appId: String,
105-
@PathParam("attemptId") attemptId: String): ExecutorListResource = {
106-
withSparkUI(appId, Some(attemptId)) { ui =>
107-
new ExecutorListResource(ui)
108-
}
109-
}
110-
111-
@Path("applications/{appId}/{attemptId}/allexecutors")
112-
def getAllExecutors(
113-
@PathParam("appId") appId: String,
114-
@PathParam("attemptId") attemptId: String): AllExecutorListResource = {
115-
withSparkUI(appId, Some(attemptId)) { ui =>
116-
new AllExecutorListResource(ui)
117-
}
118-
}
119-
120-
@Path("applications/{appId}/stages")
121-
def getStages(@PathParam("appId") appId: String): AllStagesResource = {
122-
withSparkUI(appId, None) { ui =>
123-
new AllStagesResource(ui)
124-
}
125-
}
126-
127-
@Path("applications/{appId}/{attemptId}/stages")
128-
def getStages(
129-
@PathParam("appId") appId: String,
130-
@PathParam("attemptId") attemptId: String): AllStagesResource = {
131-
withSparkUI(appId, Some(attemptId)) { ui =>
132-
new AllStagesResource(ui)
133-
}
134-
}
135-
136-
@Path("applications/{appId}/stages/{stageId: \\d+}")
137-
def getStage(@PathParam("appId") appId: String): OneStageResource = {
138-
withSparkUI(appId, None) { ui =>
139-
new OneStageResource(ui)
140-
}
141-
}
142-
143-
@Path("applications/{appId}/{attemptId}/stages/{stageId: \\d+}")
144-
def getStage(
145-
@PathParam("appId") appId: String,
146-
@PathParam("attemptId") attemptId: String): OneStageResource = {
147-
withSparkUI(appId, Some(attemptId)) { ui =>
148-
new OneStageResource(ui)
149-
}
150-
}
151-
152-
@Path("applications/{appId}/storage/rdd")
153-
def getRdds(@PathParam("appId") appId: String): AllRDDResource = {
154-
withSparkUI(appId, None) { ui =>
155-
new AllRDDResource(ui)
156-
}
157-
}
158-
159-
@Path("applications/{appId}/{attemptId}/storage/rdd")
160-
def getRdds(
161-
@PathParam("appId") appId: String,
162-
@PathParam("attemptId") attemptId: String): AllRDDResource = {
163-
withSparkUI(appId, Some(attemptId)) { ui =>
164-
new AllRDDResource(ui)
165-
}
166-
}
167-
168-
@Path("applications/{appId}/storage/rdd/{rddId: \\d+}")
169-
def getRdd(@PathParam("appId") appId: String): OneRDDResource = {
170-
withSparkUI(appId, None) { ui =>
171-
new OneRDDResource(ui)
172-
}
173-
}
174-
175-
@Path("applications/{appId}/{attemptId}/storage/rdd/{rddId: \\d+}")
176-
def getRdd(
177-
@PathParam("appId") appId: String,
178-
@PathParam("attemptId") attemptId: String): OneRDDResource = {
179-
withSparkUI(appId, Some(attemptId)) { ui =>
180-
new OneRDDResource(ui)
181-
}
182-
}
183-
184-
@Path("applications/{appId}/logs")
185-
def getEventLogs(
186-
@PathParam("appId") appId: String): EventLogDownloadResource = {
187-
try {
188-
// withSparkUI will throw NotFoundException if attemptId exists for this application.
189-
// So we need to try again with attempt id "1".
190-
withSparkUI(appId, None) { _ =>
191-
new EventLogDownloadResource(uiRoot, appId, None)
192-
}
193-
} catch {
194-
case _: NotFoundException =>
195-
withSparkUI(appId, Some("1")) { _ =>
196-
new EventLogDownloadResource(uiRoot, appId, None)
197-
}
198-
}
199-
}
200-
201-
@Path("applications/{appId}/{attemptId}/logs")
202-
def getEventLogs(
203-
@PathParam("appId") appId: String,
204-
@PathParam("attemptId") attemptId: String): EventLogDownloadResource = {
205-
withSparkUI(appId, Some(attemptId)) { _ =>
206-
new EventLogDownloadResource(uiRoot, appId, Some(attemptId))
207-
}
208-
}
50+
def application(): Class[OneApplicationResource] = classOf[OneApplicationResource]
20951

21052
@Path("version")
211-
def getVersion(): VersionResource = {
212-
new VersionResource(uiRoot)
213-
}
214-
215-
@Path("applications/{appId}/environment")
216-
def getEnvironment(@PathParam("appId") appId: String): ApplicationEnvironmentResource = {
217-
withSparkUI(appId, None) { ui =>
218-
new ApplicationEnvironmentResource(ui)
219-
}
220-
}
53+
def version(): VersionInfo = new VersionInfo(org.apache.spark.SPARK_VERSION)
22154

222-
@Path("applications/{appId}/{attemptId}/environment")
223-
def getEnvironment(
224-
@PathParam("appId") appId: String,
225-
@PathParam("attemptId") attemptId: String): ApplicationEnvironmentResource = {
226-
withSparkUI(appId, Some(attemptId)) { ui =>
227-
new ApplicationEnvironmentResource(ui)
228-
}
229-
}
23055
}
23156

23257
private[spark] object ApiRootResource {
@@ -293,23 +118,29 @@ private[v1] trait ApiRequestContext {
293118

294119
def uiRoot: UIRoot = UIRootFromServletContext.getUiRoot(servletContext)
295120

121+
}
296122

297-
/**
298-
* Get the spark UI with the given appID, and apply a function
299-
* to it. If there is no such app, throw an appropriate exception
300-
*/
301-
def withSparkUI[T](appId: String, attemptId: Option[String])(f: SparkUI => T): T = {
123+
/**
124+
* Base class for resource handlers that use app-specific data. Abstracts away dealing with
125+
* application and attempt IDs, and finding the app's UI.
126+
*/
127+
private[v1] trait BaseAppResource extends ApiRequestContext {
128+
129+
@PathParam("appId") protected[this] var appId: String = _
130+
@PathParam("attemptId") protected[this] var attemptId: String = _
131+
132+
protected def withUI[T](fn: SparkUI => T): T = {
302133
try {
303-
uiRoot.withSparkUI(appId, attemptId) { ui =>
134+
uiRoot.withSparkUI(appId, Option(attemptId)) { ui =>
304135
val user = httpRequest.getRemoteUser()
305136
if (!ui.securityManager.checkUIViewPermissions(user)) {
306137
throw new ForbiddenException(raw"""user "$user" is not authorized""")
307138
}
308-
f(ui)
139+
fn(ui)
309140
}
310141
} catch {
311142
case _: NoSuchElementException =>
312-
val appKey = attemptId.map(appId + "/" + _).getOrElse(appId)
143+
val appKey = Option(attemptId).map(appId + "/" + _).getOrElse(appId)
313144
throw new NotFoundException(s"no such app: $appKey")
314145
}
315146
}

0 commit comments

Comments
 (0)