
Commit 301eb4a

Merge commit with 2 parents: cedcc6f + cf1d32e

File tree: 144 files changed (+3026, -1271 lines)

Note: large commits have some content hidden by default, so only a subset of the 144 changed files is shown below.


assembly/pom.xml

Lines changed: 10 additions & 0 deletions
@@ -349,5 +349,15 @@
         </plugins>
       </build>
     </profile>
+    <profile>
+      <id>kinesis-asl</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+          <version>${commons.httpclient.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
   </profiles>
 </project>
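
Since Maven profiles take effect only when explicitly enabled, a build picking up this new httpclient dependency would be invoked roughly as follows (the -DskipTests flag is just a common convenience, not part of this change):

    mvn -Pkinesis-asl -DskipTests clean package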

bin/compute-classpath.cmd

Lines changed: 7 additions & 1 deletion
@@ -36,7 +36,13 @@ rem Load environment variables from conf\spark-env.cmd, if it exists
 if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
 
 rem Build up classpath
-set CLASSPATH=%SPARK_CLASSPATH%;%SPARK_SUBMIT_CLASSPATH%;%FWDIR%conf
+set CLASSPATH=%SPARK_CLASSPATH%;%SPARK_SUBMIT_CLASSPATH%
+
+if not "x%SPARK_CONF_DIR%"=="x" (
+  set CLASSPATH=%CLASSPATH%;%SPARK_CONF_DIR%
+) else (
+  set CLASSPATH=%CLASSPATH%;%FWDIR%conf
+)
 
 if exist "%FWDIR%RELEASE" (
   for %%d in ("%FWDIR%lib\spark-assembly*.jar") do (

bin/compute-classpath.sh

Lines changed: 7 additions & 1 deletion
@@ -27,8 +27,14 @@ FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 . "$FWDIR"/bin/load-spark-env.sh
 
+CLASSPATH="$SPARK_CLASSPATH:$SPARK_SUBMIT_CLASSPATH"
+
 # Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$SPARK_SUBMIT_CLASSPATH:$FWDIR/conf"
+if [ -n "$SPARK_CONF_DIR" ]; then
+  CLASSPATH="$CLASSPATH:$SPARK_CONF_DIR"
+else
+  CLASSPATH="$CLASSPATH:$FWDIR/conf"
+fi
 
 ASSEMBLY_DIR="$FWDIR/assembly/target/scala-$SCALA_VERSION"
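
With this change a configuration directory outside the source tree can take the place of $FWDIR/conf on the classpath. A minimal sketch of how the new variable might be exercised (the /etc/spark/conf path is purely illustrative):

    # Put an external conf directory on the classpath instead of $FWDIR/conf
    export SPARK_CONF_DIR=/etc/spark/conf
    ./bin/spark-shell

    # Unset it to fall back to the in-tree conf directory
    unset SPARK_CONF_DIR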

bin/pyspark

Lines changed: 12 additions & 12 deletions
@@ -52,10 +52,20 @@ fi
 
 # Figure out which Python executable to use
 if [[ -z "$PYSPARK_PYTHON" ]]; then
-  PYSPARK_PYTHON="python"
+  if [[ "$IPYTHON" = "1" || -n "$IPYTHON_OPTS" ]]; then
+    # for backward compatibility
+    PYSPARK_PYTHON="ipython"
+  else
+    PYSPARK_PYTHON="python"
+  fi
 fi
 export PYSPARK_PYTHON
 
+if [[ -z "$PYSPARK_PYTHON_OPTS" && -n "$IPYTHON_OPTS" ]]; then
+  # for backward compatibility
+  PYSPARK_PYTHON_OPTS="$IPYTHON_OPTS"
+fi
+
 # Add the PySpark classes to the Python path:
 export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
 export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH"
@@ -64,11 +74,6 @@ export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH"
 export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
 export PYTHONSTARTUP="$FWDIR/python/pyspark/shell.py"
 
-# If IPython options are specified, assume user wants to run IPython
-if [[ -n "$IPYTHON_OPTS" ]]; then
-  IPYTHON=1
-fi
-
 # Build up arguments list manually to preserve quotes and backslashes.
 # We export Spark submit arguments as an environment variable because shell.py must run as a
 # PYTHONSTARTUP script, which does not take in arguments. This is required for IPython notebooks.
@@ -106,10 +111,5 @@ if [[ "$1" =~ \.py$ ]]; then
 else
   # PySpark shell requires special handling downstream
   export PYSPARK_SHELL=1
-  # Only use ipython if no command line arguments were provided [SPARK-1134]
-  if [[ "$IPYTHON" = "1" ]]; then
-    exec ${PYSPARK_PYTHON:-ipython} $IPYTHON_OPTS
-  else
-    exec "$PYSPARK_PYTHON"
-  fi
+  exec "$PYSPARK_PYTHON" $PYSPARK_PYTHON_OPTS
 fi
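
The IPYTHON and IPYTHON_OPTS variables are superseded here by the more general PYSPARK_PYTHON and PYSPARK_PYTHON_OPTS pair, with the old names still honored via the compatibility shims above. A small usage sketch (the notebook option is only an illustration):

    # old style, still works for backward compatibility
    IPYTHON=1 ./bin/pyspark

    # new style: pick the interpreter and its options explicitly
    PYSPARK_PYTHON=ipython PYSPARK_PYTHON_OPTS="notebook" ./bin/pyspark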

bin/pyspark2.cmd

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*
 )
 if [%FOUND_JAR%] == [0] (
   echo Failed to find Spark assembly JAR.
-  echo You need to build Spark with sbt\sbt assembly before running this program.
+  echo You need to build Spark before running this program.
   goto exit
 )
 :skip_build_test

bin/run-example2.cmd

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@ if exist "%FWDIR%RELEASE" (
 )
 if "x%SPARK_EXAMPLES_JAR%"=="x" (
   echo Failed to find Spark examples assembly JAR.
-  echo You need to build Spark with sbt\sbt assembly before running this program.
+  echo You need to build Spark before running this program.
   goto exit
 )

bin/spark-class

Lines changed: 1 addition & 1 deletion
@@ -146,7 +146,7 @@ fi
 if [[ "$1" =~ org.apache.spark.tools.* ]]; then
   if test -z "$SPARK_TOOLS_JAR"; then
     echo "Failed to find Spark Tools Jar in $FWDIR/tools/target/scala-$SCALA_VERSION/" 1>&2
-    echo "You need to build spark before running $1." 1>&2
+    echo "You need to build Spark before running $1." 1>&2
     exit 1
   fi
   CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"

bin/spark-class2.cmd

Lines changed: 1 addition & 1 deletion
@@ -104,7 +104,7 @@ for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*
 )
 if "%FOUND_JAR%"=="0" (
   echo Failed to find Spark assembly JAR.
-  echo You need to build Spark with sbt\sbt assembly before running this program.
+  echo You need to build Spark before running this program.
   goto exit
 )
 :skip_build_test

bin/utils.sh

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@
 # limitations under the License.
 #
 
-# Gather all all spark-submit options into SUBMISSION_OPTS
+# Gather all spark-submit options into SUBMISSION_OPTS
 function gatherSparkSubmitOpts() {
 
   if [ -z "$SUBMIT_USAGE_FUNCTION" ]; then

core/src/main/scala/org/apache/spark/FutureAction.scala

Lines changed: 17 additions & 2 deletions
@@ -83,6 +83,15 @@ trait FutureAction[T] extends Future[T] {
    */
   @throws(classOf[Exception])
   def get(): T = Await.result(this, Duration.Inf)
+
+  /**
+   * Returns the job IDs run by the underlying async operation.
+   *
+   * This returns the current snapshot of the job list. Certain operations may run multiple
+   * jobs, so multiple calls to this method may return different lists.
+   */
+  def jobIds: Seq[Int]
+
 }
@@ -150,8 +159,7 @@ class SimpleFutureAction[T] private[spark](jobWaiter: JobWaiter[_], resultFunc:
     }
   }
 
-  /** Get the corresponding job id for this action. */
-  def jobId = jobWaiter.jobId
+  def jobIds = Seq(jobWaiter.jobId)
 }
@@ -171,6 +179,8 @@ class ComplexFutureAction[T] extends FutureAction[T] {
   // is cancelled before the action was even run (and thus we have no thread to interrupt).
   @volatile private var _cancelled: Boolean = false
 
+  @volatile private var jobs: Seq[Int] = Nil
+
   // A promise used to signal the future.
   private val p = promise[T]()
 
@@ -219,6 +229,8 @@
       }
     }
 
+    this.jobs = jobs ++ job.jobIds
+
     // Wait for the job to complete. If the action is cancelled (with an interrupt),
     // cancel the job and stop the execution. This is not in a synchronized block because
    // Await.ready eventually waits on the monitor in FutureJob.jobWaiter.
@@ -255,4 +267,7 @@
   override def isCompleted: Boolean = p.isCompleted
 
   override def value: Option[Try[T]] = p.future.value
+
+  def jobIds = jobs
+
 }
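
The new trait-level jobIds accessor lets callers correlate an async action with the Spark jobs it launched. A minimal driver-side sketch, assuming a live SparkContext named sc (the RDD and its contents are placeholders):

    import org.apache.spark.SparkContext._   // implicit conversion to AsyncRDDActions

    val rdd = sc.parallelize(1 to 1000, 10)
    val action = rdd.countAsync()            // a SimpleFutureAction[Long]
    val count = action.get()                 // block until the underlying job finishes
    // SimpleFutureAction reports exactly one job ID; a ComplexFutureAction
    // (e.g. from takeAsync) may accumulate several IDs as jobs are submitted.
    println(s"count=$count, jobs run: " + action.jobIds.mkString(", "))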
