17 | 17 | # limitations under the License.
18 | 18 | #
19 | 19 |
| 20 | +#
| 21 | +# Shell script for starting the Spark SQL Thrift server
| 22 | +
| 23 | +SCALA_VERSION=2.10
| 24 | +
| 25 | +cygwin=false
| 26 | +case "`uname`" in
| 27 | +  CYGWIN*) cygwin=true;;
| 28 | +esac
| 29 | +
| 30 | +# Enter posix mode for bash
| 31 | +set -o posix
| 32 | +
| 33 | +## Global script variables
| 34 | +FWDIR="$(cd `dirname $0`/..; pwd)"
| 35 | +
| 36 | +if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
| 37 | +  echo "Usage: ./sbin/start-thriftserver [options]"
| 38 | +  $FWDIR/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
| 39 | +  exit 0
| 40 | +fi
| 41 | +
20 | 42 | # Figure out where Spark is installed
21 | 43 | FWDIR="$(cd `dirname $0`/..; pwd)"
22 | 44 |
| 45 | +ASSEMBLY_DIR="$FWDIR/assembly/target/scala-$SCALA_VERSION"
| 46 | +
| 47 | +if [ -n "$JAVA_HOME" ]; then
| 48 | +  JAR_CMD="$JAVA_HOME/bin/jar"
| 49 | +else
| 50 | +  JAR_CMD="jar"
| 51 | +fi
| 52 | +
| 53 | +# Use spark-assembly jar from either RELEASE or assembly directory
| 54 | +if [ -f "$FWDIR/RELEASE" ]; then
| 55 | +  assembly_folder="$FWDIR"/lib
| 56 | +else
| 57 | +  assembly_folder="$ASSEMBLY_DIR"
| 58 | +fi
| 59 | +
| 60 | +num_jars=$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*\.jar" | wc -l)
| 61 | +if [ "$num_jars" -eq "0" ]; then
| 62 | +  echo "Failed to find Spark assembly in $assembly_folder"
| 63 | +  echo "You need to build Spark before running this program."
| 64 | +  exit 1
| 65 | +fi
| 66 | +if [ "$num_jars" -gt "1" ]; then
| 67 | +  jars_list=$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*.jar")
| 68 | +  echo "Found multiple Spark assembly jars in $assembly_folder:"
| 69 | +  echo "$jars_list"
| 70 | +  echo "Please remove all but one jar."
| 71 | +  exit 1
| 72 | +fi
| 73 | +
| 74 | +ASSEMBLY_JAR=$(ls "$assembly_folder"/spark-assembly*hadoop*.jar 2>/dev/null)
| 75 | +
| 76 | +# Verify that versions of java used to build the jars and run Spark are compatible
| 77 | +jar_error_check=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" nonexistent/class/path 2>&1)
| 78 | +if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
| 79 | +  echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
| 80 | +  echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
| 81 | +  echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
| 82 | +  echo "or build Spark with Java 6." 1>&2
| 83 | +  exit 1
| 84 | +fi
| 85 | +
23 | 86 | CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
24 | | -$FWDIR/bin/spark-class $CLASS $@
| 87 | +exec "$FWDIR"/bin/spark-submit --class $CLASS $@ $ASSEMBLY_JAR
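With the replaced last line, every argument given to the script is forwarded to spark-submit ahead of the located assembly jar, so standard spark-submit options work directly. A minimal usage sketch of the updated script (the master URL below is an illustrative placeholder, not part of this diff):

# Print the spark-submit options the script accepts (handled by the new --help/-h branch)
./sbin/start-thriftserver.sh --help

# Start the Thrift server; options are passed through to spark-submit unchanged
# (spark://example-host:7077 is a placeholder master URL)
./sbin/start-thriftserver.sh --master spark://example-host:7077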