From e9d7fb71dde9c8a266fd2accfc01663b0a2306b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?baishuo=28=E7=99=BD=E7=A1=95=29?= Date: Sun, 16 Mar 2014 22:41:49 +0800 Subject: [PATCH 1/7] Update CommandUtils.scala --- .../org/apache/spark/deploy/worker/CommandUtils.scala | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala index 0c761dfc93a1f..f07855858135d 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala @@ -60,7 +60,14 @@ object CommandUtils extends Logging { Seq(sparkHome + "/bin/compute-classpath" + ext), extraEnvironment=command.environment) - Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts + val debugflage = System.getProperty("spark.excutor.debug", "0").toInt + if (debugflage==0) { + Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts + } + else { + val debugInfo = "-Xrunjdwp:transport=dt_socket,address=" +System.getProperty("spark.excutor.debug.port", "18000") +",server=y,suspend=y" + Seq("-Xdebug",debugInfo, "-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts + } } /** Spawn a thread that will redirect a given stream to a file */ From 679a82ab9cb4de629276985730ede7d12cf7cf9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?baishuo=28=E7=99=BD=E7=A1=95=29?= Date: Mon, 17 Mar 2014 14:26:18 +0800 Subject: [PATCH 2/7] Update CommandUtils.scala --- .../scala/org/apache/spark/deploy/worker/CommandUtils.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala index f07855858135d..6b7a17cf327eb 100644 --- 
a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala @@ -60,8 +60,8 @@ object CommandUtils extends Logging { Seq(sparkHome + "/bin/compute-classpath" + ext), extraEnvironment=command.environment) - val debugflage = System.getProperty("spark.excutor.debug", "0").toInt - if (debugflage==0) { + val debugflag = System.getProperty("spark.excutor.debug", "0").toInt + if (debugflag==0) { Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts } else { From 8cd5fca9799ef1494383c95dc33e5781c612e650 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?baishuo=28=E7=99=BD=E7=A1=95=29?= Date: Mon, 17 Mar 2014 14:53:23 +0800 Subject: [PATCH 3/7] Update CommandUtils.scala --- .../scala/org/apache/spark/deploy/worker/CommandUtils.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala index 6b7a17cf327eb..f6f2de40d16d6 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala @@ -60,8 +60,8 @@ object CommandUtils extends Logging { Seq(sparkHome + "/bin/compute-classpath" + ext), extraEnvironment=command.environment) - val debugflag = System.getProperty("spark.excutor.debug", "0").toInt - if (debugflag==0) { + val debugflag = System.getProperty("spark.excutor.debug", "0").toBoolean + if (!debugflag) { Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts } else { From 90c1588f0614006e6d5f1d017c73631fe5fc4cbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?baishuo=28=E7=99=BD=E7=A1=95=29?= Date: Mon, 17 Mar 2014 15:26:55 +0800 Subject: [PATCH 4/7] Update monitoring.md --- docs/monitoring.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/monitoring.md b/docs/monitoring.md index 
15bfb041780da..712b2e93012cf 100644 --- a/docs/monitoring.md +++ b/docs/monitoring.md @@ -81,3 +81,7 @@ can provide fine-grained profiling on individual nodes. * JVM utilities such as `jstack` for providing stack traces, `jmap` for creating heap-dumps, `jstat` for reporting time-series statistics and `jconsole` for visually exploring various JVM properties are useful for those comfortable with JVM internals. + +#debug the Excutor process + +Since the excutor process is started by ProcessBuilder, if we wang to trace the code, we can modify the CommandUtils.scala, add the option for remote-debugging. please refer https://github.com/apache/spark/pull/157 From 3519e511b26c887715446e59f408b9466cddb436 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?baishuo=28=E7=99=BD=E7=A1=95=29?= Date: Mon, 17 Mar 2014 18:44:53 +0800 Subject: [PATCH 5/7] Update CommandUtils.scala --- .../scala/org/apache/spark/deploy/worker/CommandUtils.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala index f6f2de40d16d6..68b477f44baa3 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala @@ -60,7 +60,7 @@ object CommandUtils extends Logging { Seq(sparkHome + "/bin/compute-classpath" + ext), extraEnvironment=command.environment) - val debugflag = System.getProperty("spark.excutor.debug", "0").toBoolean + val debugflag = System.getProperty("spark.excutor.debug", "false").toBoolean if (!debugflag) { Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts } From a7093de03748ce88819b2673e452b9df09377a7f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?baishuo=28=E7=99=BD=E7=A1=95=29?= Date: Mon, 17 Mar 2014 18:46:40 +0800 Subject: [PATCH 6/7] Update monitoring.md --- docs/monitoring.md | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/docs/monitoring.md b/docs/monitoring.md index 712b2e93012cf..d06c1296c92a1 100644 --- a/docs/monitoring.md +++ b/docs/monitoring.md @@ -84,4 +84,4 @@ properties are useful for those comfortable with JVM internals. #debug the Excutor process -Since the excutor process is started by ProcessBuilder, if we wang to trace the code, we can modify the CommandUtils.scala, add the option for remote-debugging. please refer https://github.com/apache/spark/pull/157 +Since the excutor process is started by ProcessBuilder, if we wang to trace the code, we can modify the CommandUtils.scala, and rebuild the package. How to add the option for remote-debugging. please refer https://github.com/apache/spark/pull/157 From 9f933c6b71a21fb5c30ea7a8ae81d740542e68a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?baishuo=28=E7=99=BD=E7=A1=95=29?= Date: Mon, 17 Mar 2014 23:48:29 +0800 Subject: [PATCH 7/7] Update monitoring.md --- docs/monitoring.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/monitoring.md b/docs/monitoring.md index d06c1296c92a1..165eb745e16d8 100644 --- a/docs/monitoring.md +++ b/docs/monitoring.md @@ -84,4 +84,4 @@ properties are useful for those comfortable with JVM internals. #debug the Excutor process -Since the excutor process is started by ProcessBuilder, if we wang to trace the code, we can modify the CommandUtils.scala, and rebuild the package. How to add the option for remote-debugging. please refer https://github.com/apache/spark/pull/157 +Since the excutor process is started by ProcessBuilder, if we wang to trace the code, we can modify the CommandUtils.scala, and rebuild the package. We can set spark.excutor.debug=true(default is false) to turn on this function and set spark.excutor.debug.port=xxxx (default is 18000)to specify the port for remote-debugging . The both option are set as jvm arguments when we start one worker process. 
please refer to https://github.com/apache/spark/pull/157 for details