
Commit c41a664

merge master

2 parents: d8af0ed + b32bb72

388 files changed (+12106, -5211 lines)


.gitignore

Lines changed: 5 additions & 0 deletions
@@ -1,9 +1,12 @@
 *~
+*.#*
+*#*#
 *.swp
 *.ipr
 *.iml
 *.iws
 .idea/
+.idea_modules/
 sbt/*.jar
 .settings
 .cache
@@ -16,9 +19,11 @@ third_party/libmesos.so
 third_party/libmesos.dylib
 conf/java-opts
 conf/*.sh
+conf/*.cmd
 conf/*.properties
 conf/*.conf
 conf/*.xml
+conf/slaves
 docs/_site
 docs/api
 target/

.rat-excludes

Lines changed: 3 additions & 0 deletions
@@ -19,7 +19,9 @@ log4j.properties
 log4j.properties.template
 metrics.properties.template
 slaves
+slaves.template
 spark-env.sh
+spark-env.cmd
 spark-env.sh.template
 log4j-defaults.properties
 bootstrap-tooltip.js
@@ -58,3 +60,4 @@ dist/*
 .*iws
 logs
 .*scalastyle-output.xml
+.*dependency-reduced-pom.xml

assembly/pom.xml

Lines changed: 13 additions & 1 deletion
@@ -141,7 +141,9 @@
           <include>com.google.common.**</include>
         </includes>
         <excludes>
-          <exclude>com.google.common.base.Optional**</exclude>
+          <exclude>com/google/common/base/Absent*</exclude>
+          <exclude>com/google/common/base/Optional*</exclude>
+          <exclude>com/google/common/base/Present*</exclude>
         </excludes>
       </relocation>
     </relocations>
@@ -347,5 +349,15 @@
       </plugins>
     </build>
   </profile>
+  <profile>
+    <id>kinesis-asl</id>
+    <dependencies>
+      <dependency>
+        <groupId>org.apache.httpcomponents</groupId>
+        <artifactId>httpclient</artifactId>
+        <version>${commons.httpclient.version}</version>
+      </dependency>
+    </dependencies>
+  </profile>
 </profiles>
</project>
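Note: the kinesis-asl profile added above only takes effect when that profile is activated, and it assumes the ${commons.httpclient.version} property is defined in the parent pom. As a hedged sketch (exact goals and flags may differ for your checkout), activating it from the command line could look like:

    # Build with the Kinesis ASL profile enabled so the pinned httpclient
    # version is applied to the assembly.
    mvn -Pkinesis-asl -DskipTests clean package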

bagel/src/test/resources/log4j.properties

Lines changed: 1 addition & 1 deletion
@@ -21,7 +21,7 @@ log4j.appender.file=org.apache.log4j.FileAppender
 log4j.appender.file.append=false
 log4j.appender.file.file=target/unit-tests.log
 log4j.appender.file.layout=org.apache.log4j.PatternLayout
-log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %p %c{1}: %m%n
+log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
 
 # Ignore messages below warning level from Jetty, because it's a bit verbose
 log4j.logger.org.eclipse.jetty=WARN
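For reference, the added %t conversion character in the log4j PatternLayout prints the logging thread's name between the timestamp and the level. With a hypothetical thread name and message, a unit-test log line would change roughly like this (illustrative values only):

    14/09/01 12:00:00.000 INFO SparkContext: example message
    14/09/01 12:00:00.000 Executor task launch worker-0 INFO SparkContext: example message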

bin/compute-classpath.cmd

Lines changed: 7 additions & 1 deletion
@@ -36,7 +36,13 @@ rem Load environment variables from conf\spark-env.cmd, if it exists
 if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
 
 rem Build up classpath
-set CLASSPATH=%SPARK_CLASSPATH%;%SPARK_SUBMIT_CLASSPATH%;%FWDIR%conf
+set CLASSPATH=%SPARK_CLASSPATH%;%SPARK_SUBMIT_CLASSPATH%
+
+if not "x%SPARK_CONF_DIR%"=="x" (
+  set CLASSPATH=%CLASSPATH%;%SPARK_CONF_DIR%
+) else (
+  set CLASSPATH=%CLASSPATH%;%FWDIR%conf
+)
 
 if exist "%FWDIR%RELEASE" (
   for %%d in ("%FWDIR%lib\spark-assembly*.jar") do (

bin/compute-classpath.sh

Lines changed: 7 additions & 1 deletion
@@ -27,8 +27,14 @@ FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 . "$FWDIR"/bin/load-spark-env.sh
 
+CLASSPATH="$SPARK_CLASSPATH:$SPARK_SUBMIT_CLASSPATH"
+
 # Build up classpath
-CLASSPATH="$SPARK_CLASSPATH:$SPARK_SUBMIT_CLASSPATH:$FWDIR/conf"
+if [ -n "$SPARK_CONF_DIR" ]; then
+  CLASSPATH="$CLASSPATH:$SPARK_CONF_DIR"
+else
+  CLASSPATH="$CLASSPATH:$FWDIR/conf"
+fi
 
 ASSEMBLY_DIR="$FWDIR/assembly/target/scala-$SCALA_VERSION"
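With the two changes above, both compute-classpath scripts prefer an explicitly set SPARK_CONF_DIR over the default conf directory under the Spark home. A minimal usage sketch (the /etc/spark/conf path is only an illustrative example):

    # Point Spark at an external configuration directory; compute-classpath.sh
    # will then place this directory, rather than $SPARK_HOME/conf, on the classpath.
    export SPARK_CONF_DIR=/etc/spark/conf
    ./bin/spark-shell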

bin/pyspark

Lines changed: 12 additions & 12 deletions
@@ -52,10 +52,20 @@ fi
 
 # Figure out which Python executable to use
 if [[ -z "$PYSPARK_PYTHON" ]]; then
-  PYSPARK_PYTHON="python"
+  if [[ "$IPYTHON" = "1" || -n "$IPYTHON_OPTS" ]]; then
+    # for backward compatibility
+    PYSPARK_PYTHON="ipython"
+  else
+    PYSPARK_PYTHON="python"
+  fi
 fi
 export PYSPARK_PYTHON
 
+if [[ -z "$PYSPARK_PYTHON_OPTS" && -n "$IPYTHON_OPTS" ]]; then
+  # for backward compatibility
+  PYSPARK_PYTHON_OPTS="$IPYTHON_OPTS"
+fi
+
 # Add the PySpark classes to the Python path:
 export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
 export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH"
@@ -64,11 +74,6 @@ export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH"
 export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
 export PYTHONSTARTUP="$FWDIR/python/pyspark/shell.py"
 
-# If IPython options are specified, assume user wants to run IPython
-if [[ -n "$IPYTHON_OPTS" ]]; then
-  IPYTHON=1
-fi
-
 # Build up arguments list manually to preserve quotes and backslashes.
 # We export Spark submit arguments as an environment variable because shell.py must run as a
 # PYTHONSTARTUP script, which does not take in arguments. This is required for IPython notebooks.
@@ -106,10 +111,5 @@ if [[ "$1" =~ \.py$ ]]; then
 else
   # PySpark shell requires special handling downstream
   export PYSPARK_SHELL=1
-  # Only use ipython if no command line arguments were provided [SPARK-1134]
-  if [[ "$IPYTHON" = "1" ]]; then
-    exec ${PYSPARK_PYTHON:-ipython} $IPYTHON_OPTS
-  else
-    exec "$PYSPARK_PYTHON"
-  fi
+  exec "$PYSPARK_PYTHON" $PYSPARK_PYTHON_OPTS
 fi
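After this change the script execs PYSPARK_PYTHON with PYSPARK_PYTHON_OPTS directly, and the old IPYTHON/IPYTHON_OPTS variables are only folded into those new variables for backward compatibility. A rough equivalence sketch (the "notebook" value is just an illustrative IPython argument):

    # Old style, still honored for backward compatibility
    IPYTHON=1 IPYTHON_OPTS="notebook" ./bin/pyspark

    # Preferred style after this change
    PYSPARK_PYTHON=ipython PYSPARK_PYTHON_OPTS="notebook" ./bin/pyspark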

bin/pyspark2.cmd

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*
 )
 if [%FOUND_JAR%] == [0] (
   echo Failed to find Spark assembly JAR.
-  echo You need to build Spark with sbt\sbt assembly before running this program.
+  echo You need to build Spark before running this program.
   goto exit
 )
 :skip_build_test

bin/run-example2.cmd

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@ if exist "%FWDIR%RELEASE" (
 )
 if "x%SPARK_EXAMPLES_JAR%"=="x" (
   echo Failed to find Spark examples assembly JAR.
-  echo You need to build Spark with sbt\sbt assembly before running this program.
+  echo You need to build Spark before running this program.
   goto exit
 )

bin/spark-class

Lines changed: 1 addition & 1 deletion
@@ -146,7 +146,7 @@ fi
 if [[ "$1" =~ org.apache.spark.tools.* ]]; then
   if test -z "$SPARK_TOOLS_JAR"; then
     echo "Failed to find Spark Tools Jar in $FWDIR/tools/target/scala-$SCALA_VERSION/" 1>&2
-    echo "You need to build spark before running $1." 1>&2
+    echo "You need to build Spark before running $1." 1>&2
     exit 1
   fi
   CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
