
Commit afc3383

Update to follow the code style
1 parent 071fdd1 commit afc3383

4 files changed: 19 additions, 20 deletions


core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala

Lines changed: 2 additions & 3 deletions
@@ -173,9 +173,8 @@ private[spark] object HttpBroadcast extends Logging {
       serOut.writeObject(value)
       serOut.close()
       files += file.getAbsolutePath
-    }
-    finally {
-      fileOutputStream.close
+    } finally {
+      fileOutputStream.close()
     }
   }
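
For readers following along, here is the same pattern in a minimal, self-contained form, using a plain java.io object stream rather than Spark's pluggable serializer (the object WriteStyleExample and the helper writeObjectToFile are illustrative, not part of this commit): the closing brace and finally share a line, and the side-effecting close() keeps its parentheses.

import java.io.{File, FileOutputStream, ObjectOutputStream}

object WriteStyleExample {
  // Illustrative helper, not part of the commit: serialize a value to a file,
  // closing the underlying stream in finally per the adopted style.
  def writeObjectToFile(file: File, value: AnyRef): Unit = {
    val fileOutputStream = new FileOutputStream(file)
    try {
      val serOut = new ObjectOutputStream(fileOutputStream)
      serOut.writeObject(value)
      serOut.close()
    } finally {
      // Explicit parentheses on close(): it is a side-effecting call.
      fileOutputStream.close()
    }
  }
}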

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 7 additions & 10 deletions
@@ -327,17 +327,14 @@ object SparkSubmitArguments {
     val inputStream = new FileInputStream(file)
     try {
       val properties = new Properties()
-      try {
-        properties.load(inputStream)
-      } catch {
-        case e: IOException =>
-          val message = s"Failed when loading Spark properties file ${file.getName}"
-          throw new SparkException(message, e)
-      }
+      properties.load(inputStream)
       properties.stringPropertyNames().toSeq.map(k => (k, properties(k)))
-    }
-    finally {
-      inputStream.close
+    } catch {
+      case e: IOException =>
+        val message = s"Failed when loading Spark properties file ${file.getName}"
+        throw new SparkException(message, e)
+    } finally {
+      inputStream.close()
     }
   }
 }
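
A rough, standalone sketch of the flattened control flow above (a single try with catch and finally instead of a nested try). It swaps the Spark-internal pieces for plain JDK ones, so SparkException becomes a RuntimeException and the implicit properties(k) lookup becomes getProperty; the name loadPropertiesFile is illustrative.

import java.io.{File, FileInputStream, IOException}
import java.util.Properties
import scala.collection.JavaConverters._

object PropertiesStyleExample {
  // Illustrative: load a properties file into Scala pairs, wrapping IO errors
  // and always closing the stream in finally.
  def loadPropertiesFile(file: File): Seq[(String, String)] = {
    val inputStream = new FileInputStream(file)
    try {
      val properties = new Properties()
      properties.load(inputStream)
      properties.stringPropertyNames().asScala.toSeq.map(k => (k, properties.getProperty(k)))
    } catch {
      case e: IOException =>
        throw new RuntimeException(s"Failed when loading properties file ${file.getName}", e)
    } finally {
      inputStream.close()
    }
  }
}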

core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala

Lines changed: 2 additions & 4 deletions
@@ -85,8 +85,7 @@ private[spark] class FileSystemPersistenceEngine(
     val out = new FileOutputStream(file)
     try {
       out.write(serialized)
-    }
-    finally {
+    } finally {
       out.close()
     }
   }
@@ -96,8 +95,7 @@ private[spark] class FileSystemPersistenceEngine(
     val dis = new DataInputStream(new FileInputStream(file))
     try {
       dis.readFully(fileData)
-    }
-    finally {
+    } finally {
       dis.close()
     }
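
The read path follows the same shape. A minimal sketch (readBytesFromFile is an illustrative name, not the Spark method) that fills a byte array with readFully and closes the stream in finally:

import java.io.{DataInputStream, File, FileInputStream}

object ReadStyleExample {
  // Illustrative: read a whole file into a byte array; the stream is always
  // closed in finally, and the closing brace shares a line with finally.
  def readBytesFromFile(file: File): Array[Byte] = {
    val fileData = new Array[Byte](file.length().toInt)
    val dis = new DataInputStream(new FileInputStream(file))
    try {
      dis.readFully(fileData)
    } finally {
      dis.close()
    }
    fileData
  }
}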

core/src/main/scala/org/apache/spark/storage/DiskStore.scala

Lines changed: 8 additions & 3 deletions
@@ -79,10 +79,15 @@ private class DiskStore(blockManager: BlockManager, diskManager: DiskBlockManage
     val outputStream = new FileOutputStream(file)
     try {
       blockManager.dataSerializeStream(blockId, outputStream, values)
+      outputStream.close()
+    } catch {
+      case e: Throwable => {
+        outputStream.close()
+        if(file.exists()) file.delete()
+        throw e
+      }
     }
-    finally {
-      outputStream.close
-    }
+
     val length = file.length
 
     val timeTaken = System.currentTimeMillis - startTime
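
Unlike the other files, DiskStore replaces try/finally with an explicit catch that closes the stream, deletes the partially written file, and rethrows. A minimal sketch of that cleanup-on-failure idea, with writeBlockToFile and a raw byte payload standing in for Spark's dataSerializeStream:

import java.io.{File, FileOutputStream}

object WriteWithCleanupExample {
  // Illustrative: write bytes to a file; on any failure, close the stream,
  // delete the partial file, and rethrow so the caller still sees the error.
  def writeBlockToFile(file: File, data: Array[Byte]): Unit = {
    val outputStream = new FileOutputStream(file)
    try {
      outputStream.write(data)
      outputStream.close()
    } catch {
      case e: Throwable =>
        outputStream.close()
        if (file.exists()) file.delete()
        throw e
    }
  }
}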
