@@ -22,7 +22,6 @@ import java.net.Socket
 
 import akka.actor.ActorSystem
 
-import scala.collection.JavaConversions._
 import scala.collection.mutable
 import scala.util.Properties
 
@@ -90,39 +89,42 @@ class SparkEnv (
   private var driverTmpDirToDelete: Option[String] = None
 
   private[spark] def stop() {
-    isStopped = true
-    pythonWorkers.foreach { case (key, worker) => worker.stop() }
-    Option(httpFileServer).foreach(_.stop())
-    mapOutputTracker.stop()
-    shuffleManager.stop()
-    broadcastManager.stop()
-    blockManager.stop()
-    blockManager.master.stop()
-    metricsSystem.stop()
-    outputCommitCoordinator.stop()
-    rpcEnv.shutdown()
-
-    // Unfortunately Akka's awaitTermination doesn't actually wait for the Netty server to shut
-    // down, but let's call it anyway in case it gets fixed in a later release
-    // UPDATE: In Akka 2.1.x, this hangs if there are remote actors, so we can't call it.
-    // actorSystem.awaitTermination()
-
-    // Note that blockTransferService is stopped by BlockManager since it is started by it.
-
-    // If we only stop sc, but the driver process still run as a services then we need to delete
-    // the tmp dir, if not, it will create too many tmp dirs.
-    // We only need to delete the tmp dir create by driver, because sparkFilesDir is point to the
-    // current working dir in executor which we do not need to delete.
-    driverTmpDirToDelete match {
-      case Some(path) => {
-        try {
-          Utils.deleteRecursively(new File(path))
-        } catch {
-          case e: Exception =>
-            logWarning(s"Exception while deleting Spark temp dir: $path", e)
+
+    if (!isStopped) {
+      isStopped = true
+      pythonWorkers.values.foreach(_.stop())
+      Option(httpFileServer).foreach(_.stop())
+      mapOutputTracker.stop()
+      shuffleManager.stop()
+      broadcastManager.stop()
+      blockManager.stop()
+      blockManager.master.stop()
+      metricsSystem.stop()
+      outputCommitCoordinator.stop()
+      rpcEnv.shutdown()
+
+      // Unfortunately Akka's awaitTermination doesn't actually wait for the Netty server to shut
+      // down, but let's call it anyway in case it gets fixed in a later release
+      // UPDATE: In Akka 2.1.x, this hangs if there are remote actors, so we can't call it.
+      // actorSystem.awaitTermination()
+
+      // Note that blockTransferService is stopped by BlockManager since it is started by it.
+
+      // If we only stop sc, but the driver process still run as a services then we need to delete
+      // the tmp dir, if not, it will create too many tmp dirs.
+      // We only need to delete the tmp dir create by driver, because sparkFilesDir is point to the
+      // current working dir in executor which we do not need to delete.
+      driverTmpDirToDelete match {
+        case Some(path) => {
+          try {
+            Utils.deleteRecursively(new File(path))
+          } catch {
+            case e: Exception =>
+              logWarning(s"Exception while deleting Spark temp dir: $path", e)
+          }
         }
+        case None => // We just need to delete tmp dir created by driver, so do nothing on executor
       }
-      case None => // We just need to delete tmp dir created by driver, so do nothing on executor
     }
   }
 
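For reference, here is a minimal, self-contained Scala sketch (not part of the patch) of the teardown pattern the new `stop()` follows: a guard flag makes repeated calls no-ops, worker objects are stopped by iterating the map's values directly, and a driver-only temp directory is deleted through an `Option` match. All names here (`Env`, `Worker`, `register`, the use of `java.nio.file` and a local `deleteRecursively` instead of Spark's `Utils.deleteRecursively`) are illustrative stand-ins, not Spark APIs.

```scala
import java.io.File
import java.nio.file.Files

import scala.collection.mutable

// Illustrative stand-in for the worker objects kept per key.
class Worker(val name: String) {
  def stop(): Unit = println(s"stopping worker $name")
}

class Env(driverTmpDirToDelete: Option[String]) {
  private val workers = mutable.HashMap[String, Worker]()
  @volatile private var isStopped = false

  def register(key: String): Unit = workers(key) = new Worker(key)

  def stop(): Unit = {
    if (!isStopped) {                    // guard: a second stop() is a no-op
      isStopped = true
      workers.values.foreach(_.stop())   // iterate the Scala map's values directly

      // Delete the temp dir only when one was registered (driver side);
      // executors would pass None and nothing happens.
      driverTmpDirToDelete match {
        case Some(path) =>
          try {
            deleteRecursively(new File(path))
          } catch {
            case e: Exception =>
              println(s"Exception while deleting temp dir $path: ${e.getMessage}")
          }
        case None => // nothing to clean up
      }
    }
  }

  // Simple stand-in for Utils.deleteRecursively.
  private def deleteRecursively(f: File): Unit = {
    if (f.isDirectory) Option(f.listFiles()).toSeq.flatten.foreach(deleteRecursively)
    f.delete()
  }
}

object Demo extends App {
  val tmp = Files.createTempDirectory("demo-driver-tmp").toString
  val env = new Env(Some(tmp))
  env.register("py-worker-1")
  env.stop()
  env.stop() // no-op: already stopped
}
```

Note that the flag by itself is not a synchronization mechanism; as in the patch, it simply keeps repeated calls (for example, an explicit stop followed by a later cleanup path) from tearing the same resources down twice.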