
Commit 1e89dc8

Merge branch 'master' into kafka-0.8.2-test-cleanup

2 parents 4662828 + 700312e

File tree

110 files changed: +2534 −2338 lines changed


R/create-docs.sh

Lines changed: 1 addition & 4 deletions
@@ -30,10 +30,7 @@ set -e
 export FWDIR="$(cd "`dirname "$0"`"; pwd)"
 pushd $FWDIR

-# Generate Rd file
-Rscript -e 'library(devtools); devtools::document(pkg="./pkg", roclets=c("rd"))'
-
-# Install the package
+# Install the package (this will also generate the Rd files)
 ./install-dev.sh

 # Now create HTML files

R/install-dev.sh

Lines changed: 8 additions & 1 deletion
@@ -34,5 +34,12 @@ LIB_DIR="$FWDIR/lib"

 mkdir -p $LIB_DIR

-# Install R
+pushd $FWDIR
+
+# Generate Rd files if devtools is installed
+Rscript -e ' if("devtools" %in% rownames(installed.packages())) { library(devtools); devtools::document(pkg="./pkg", roclets=c("rd")) }'
+
+# Install SparkR to $LIB_DIR
 R CMD INSTALL --library=$LIB_DIR $FWDIR/pkg/
+
+popd
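
Taken together with the R/create-docs.sh change above, Rd generation now lives in install-dev.sh and runs only when devtools is available. A minimal sketch of that guard, expanded for readability (same logic as the one-liner in the diff; assumes it runs from the SparkR directory with the package under ./pkg):

Rscript -e '
  if ("devtools" %in% rownames(installed.packages())) {
    # regenerate the Rd files before installing
    library(devtools)
    devtools::document(pkg = "./pkg", roclets = c("rd"))
  } else {
    message("devtools not installed; skipping Rd generation")
  }'
R CMD INSTALL --library="$LIB_DIR" ./pkg/

The effect is that installs keep working on machines without devtools; they simply reuse whatever Rd files were generated earlier.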

bin/pyspark

Lines changed: 1 addition & 15 deletions
@@ -17,24 +17,10 @@
 # limitations under the License.
 #

-# Figure out where Spark is installed
 export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"

 source "$SPARK_HOME"/bin/load-spark-env.sh
-
-function usage() {
-  if [ -n "$1" ]; then
-    echo $1
-  fi
-  echo "Usage: ./bin/pyspark [options]" 1>&2
-  "$SPARK_HOME"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
-  exit $2
-}
-export -f usage
-
-if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
-  usage
-fi
+export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"

 # In Spark <= 1.1, setting IPYTHON=1 would cause the driver to be launched using the `ipython`
 # executable, while the worker would still be launched using PYSPARK_PYTHON.
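
This is the pattern repeated across the rest of the commit: each bin script drops its hand-rolled usage() function and instead exports a one-line usage string. A sketch of the new handoff, on the assumption (not visible in this diff) that the Java launcher, org.apache.spark.launcher.Main, reads _SPARK_CMD_USAGE when it needs to print help:

# Each front-end script now reduces to roughly this shape; the launcher,
# reached via spark-class, is assumed to print $_SPARK_CMD_USAGE on --help
# or on a bad flag, so no shell-side usage() function is needed.
export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"
exec "$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"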

bin/pyspark2.cmd

Lines changed: 1 addition & 0 deletions
@@ -21,6 +21,7 @@ rem Figure out where the Spark framework is installed
 set SPARK_HOME=%~dp0..

 call %SPARK_HOME%\bin\load-spark-env.cmd
+set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]

 rem Figure out which Python to use.
 if "x%PYSPARK_DRIVER_PYTHON%"=="x" (

bin/spark-class

Lines changed: 1 addition & 12 deletions
@@ -16,18 +16,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-set -e

 # Figure out where Spark is installed
 export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"

 . "$SPARK_HOME"/bin/load-spark-env.sh

-if [ -z "$1" ]; then
-  echo "Usage: spark-class <class> [<args>]" 1>&2
-  exit 1
-fi
-
 # Find the java binary
 if [ -n "${JAVA_HOME}" ]; then
   RUNNER="${JAVA_HOME}/bin/java"
@@ -98,9 +92,4 @@ CMD=()
 while IFS= read -d '' -r ARG; do
   CMD+=("$ARG")
 done < <("$RUNNER" -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@")
-
-if [ "${CMD[0]}" = "usage" ]; then
-  "${CMD[@]}"
-else
-  exec "${CMD[@]}"
-fi
+exec "${CMD[@]}"
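
Worth noting how the command is built here: the Java launcher emits the final command as NUL-separated arguments, the loop rebuilds them into a bash array, and with the "usage" sentinel gone the script can exec unconditionally. A self-contained sketch of the same pattern, with printf standing in for the launcher invocation:

#!/usr/bin/env bash
# printf plays the role of:
#   "$RUNNER" -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@"
emit_command() {
  printf '%s\0' echo "argument with spaces" '$not_expanded'
}

CMD=()
while IFS= read -d '' -r ARG; do  # read one NUL-terminated argument at a time
  CMD+=("$ARG")                   # spaces and shell metacharacters survive intact
done < <(emit_command)

exec "${CMD[@]}"                  # no usage/exit-code special case left to handle

NUL delimiters are the reason arguments containing spaces round-trip safely between the JVM and the shell.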

bin/spark-shell

Lines changed: 1 addition & 14 deletions
@@ -29,20 +29,7 @@ esac
 set -o posix

 export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
-
-usage() {
-  if [ -n "$1" ]; then
-    echo "$1"
-  fi
-  echo "Usage: ./bin/spark-shell [options]"
-  "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
-  exit "$2"
-}
-export -f usage
-
-if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
-  usage "" 0
-fi
+export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]"

 # SPARK-4161: scala does not assume use of the java classpath,
 # so we need to add the "-Dscala.usejavacp=true" flag manually. We

bin/spark-shell2.cmd

Lines changed: 2 additions & 19 deletions
@@ -18,12 +18,7 @@ rem limitations under the License.
 rem

 set SPARK_HOME=%~dp0..
-
-echo "%*" | findstr " \<--help\> \<-h\>" >nul
-if %ERRORLEVEL% equ 0 (
-  call :usage
-  exit /b 0
-)
+set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]

 rem SPARK-4161: scala does not assume use of the java classpath,
 rem so we need to add the "-Dscala.usejavacp=true" flag manually. We
@@ -37,16 +32,4 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
 set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"

 :run_shell
-call %SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main %*
-set SPARK_ERROR_LEVEL=%ERRORLEVEL%
-if not "x%SPARK_LAUNCHER_USAGE_ERROR%"=="x" (
-  call :usage
-  exit /b 1
-)
-exit /b %SPARK_ERROR_LEVEL%
-
-:usage
-echo %SPARK_LAUNCHER_USAGE_ERROR%
-echo "Usage: .\bin\spark-shell.cmd [options]" >&2
-call %SPARK_HOME%\bin\spark-submit2.cmd --help 2>&1 | findstr /V "Usage" 1>&2
-goto :eof
+%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main %*

bin/spark-sql

Lines changed: 2 additions & 37 deletions
@@ -17,41 +17,6 @@
 # limitations under the License.
 #

-#
-# Shell script for starting the Spark SQL CLI
-
-# Enter posix mode for bash
-set -o posix
-
-# NOTE: This exact class name is matched downstream by SparkSubmit.
-# Any changes need to be reflected there.
-export CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
-
-# Figure out where Spark is installed
 export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
-
-function usage {
-  if [ -n "$1" ]; then
-    echo "$1"
-  fi
-  echo "Usage: ./bin/spark-sql [options] [cli option]"
-  pattern="usage"
-  pattern+="\|Spark assembly has been built with Hive"
-  pattern+="\|NOTE: SPARK_PREPEND_CLASSES is set"
-  pattern+="\|Spark Command: "
-  pattern+="\|--help"
-  pattern+="\|======="
-
-  "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
-  echo
-  echo "CLI options:"
-  "$FWDIR"/bin/spark-class "$CLASS" --help 2>&1 | grep -v "$pattern" 1>&2
-  exit "$2"
-}
-export -f usage
-
-if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
-  usage "" 0
-fi
-
-exec "$FWDIR"/bin/spark-submit --class "$CLASS" "$@"
+export _SPARK_CMD_USAGE="Usage: ./bin/spark-sql [options] [cli option]"
+exec "$FWDIR"/bin/spark-submit --class org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver "$@"
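
The deleted NOTE warned that SparkSubmit matches this exact class name downstream; inlining the literal presumably keeps that contract, it just loses the comment that documented it. End-user behavior should be unchanged, e.g. (-e is a standard Spark SQL CLI flag inherited from the Hive CLI):

./bin/spark-sql -e "SELECT 1"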

bin/spark-submit

Lines changed: 0 additions & 12 deletions
@@ -22,16 +22,4 @@ SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
 # disable randomized hash for string in Python 3.3+
 export PYTHONHASHSEED=0

-# Only define a usage function if an upstream script hasn't done so.
-if ! type -t usage >/dev/null 2>&1; then
-  usage() {
-    if [ -n "$1" ]; then
-      echo "$1"
-    fi
-    "$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit --help
-    exit "$2"
-  }
-  export -f usage
-fi
-
 exec "$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"

bin/spark-submit2.cmd

Lines changed: 1 addition & 12 deletions
@@ -24,15 +24,4 @@ rem disable randomized hash for string in Python 3.3+
 set PYTHONHASHSEED=0

 set CLASS=org.apache.spark.deploy.SparkSubmit
-call %~dp0spark-class2.cmd %CLASS% %*
-set SPARK_ERROR_LEVEL=%ERRORLEVEL%
-if not "x%SPARK_LAUNCHER_USAGE_ERROR%"=="x" (
-  call :usage
-  exit /b 1
-)
-exit /b %SPARK_ERROR_LEVEL%
-
-:usage
-echo %SPARK_LAUNCHER_USAGE_ERROR%
-call %SPARK_HOME%\bin\spark-class2.cmd %CLASS% --help
-goto :eof
+%~dp0spark-class2.cmd %CLASS% %*
