File tree Expand file tree Collapse file tree 3 files changed +14
-0
lines changed
core/src/main/scala/org/apache/spark/deploy
yarn/src/main/scala/org/apache/spark/deploy/yarn Expand file tree Collapse file tree 3 files changed +14
-0
lines changed Original file line number Diff line number Diff line change @@ -328,6 +328,10 @@ object SparkSubmit {
328
328
}
329
329
}
330
330
331
+ if (args.isPython && System.getenv("PYSPARK_ARCHIVES_PATH") != null) {
332
+ args.files = mergeFileLists(args.files, System.getenv("PYSPARK_ARCHIVES_PATH"))
333
+ }
334
+
331
335
// If we're running a R app, set the main class to our specific R runner
332
336
if (args.isR && deployMode == CLIENT) {
333
337
if (args.primaryResource == SPARKR_SHELL) {
Original file line number Diff line number Diff line change @@ -326,6 +326,12 @@ private[spark] class Client(
326
326
distCacheMgr.setDistFilesEnv(env)
327
327
distCacheMgr.setDistArchivesEnv(env)
328
328
329
+ if (System.getenv("PYSPARK_ARCHIVES_PATH") != null) {
330
+ val pythonPath = System.getenv("PYSPARK_ARCHIVES_PATH").split(",").map(
331
+ p => (new Path(p)).getName).mkString(":")
332
+ env("PYTHONPATH") = pythonPath
333
+ }
334
+
329
335
// Pick up any environment variables for the AM provided through spark.yarn.appMasterEnv.*
330
336
val amEnvPrefix = "spark.yarn.appMasterEnv."
331
337
sparkConf.getAll
Original file line number Diff line number Diff line change @@ -285,6 +285,10 @@ class ExecutorRunnable(
285
285
YarnSparkHadoopUtil.addPathToEnvironment(env, key, value)
286
286
}
287
287
288
+ if (System.getenv("PYTHONPATH") != null) {
289
+ env("PYTHONPATH") = System.getenv("PYTHONPATH")
290
+ }
291
+
288
292
// Keep this for backwards compatibility but users should move to the config
289
293
sys.env.get("SPARK_YARN_USER_ENV").foreach { userEnvs =>
290
294
YarnSparkHadoopUtil.setEnvFromInputString(env, userEnvs)
You can’t perform that action at this time.
0 commit comments