
Commit ebb2418

Merge branch 'master' into fix-spark-log

2 parents: 365cc81 + bf6decf

File tree

13 files changed: +424, -85 lines

bin/jenkins-dist-publish.sh

Lines changed: 19 additions & 2 deletions

@@ -8,7 +8,24 @@ set -e -x -o pipefail
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 SPARK_BUILD_DIR=${DIR}/..

+function run() {
+    # test
+    source bin/jenkins.sh
+    install_cli
+    docker_login
+    DIST_NAME="spark-${GIT_COMMIT}" make dist && export $(cat spark_dist_uri.properties)
+    make universe && export $(cat "${WORKSPACE}/stub-universe.properties")
+    make test
+
+    # publish
+    rename_dist
+    AWS_ACCESS_KEY_ID=${PROD_AWS_ACCESS_KEY_ID} \
+    AWS_SECRET_ACCESS_KEY=${PROD_AWS_SECRET_ACCESS_KEY} \
+    S3_BUCKET=${PROD_S3_BUCKET} \
+    S3_PREFIX=${PROD_S3_PREFIX} \
+        upload_to_s3
+}
+
 pushd "${SPARK_BUILD_DIR}"
-VERSION=${GIT_BRANCH#origin/tags/custom-}
-DIST_NAME="spark-${VERSION}" make dist
+run
 popd
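Note: the `export $(cat ...)` idiom in run() relies on each properties file containing bare KEY=VALUE lines with no spaces or quoting, since the unquoted command substitution is word-split before export runs. A minimal sketch of the idiom (file contents invented; the real keys come from `make dist` and `make universe`):

    # Hypothetical properties file for illustration only.
    printf 'SPARK_DIST_URI=http://example.com/spark-abc123.tgz\n' > spark_dist_uri.properties
    export $(cat spark_dist_uri.properties)   # word-splits to SPARK_DIST_URI=... and exports it
    echo "${SPARK_DIST_URI}"                  # -> http://example.com/spark-abc123.tgz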

bin/jenkins.sh

Lines changed: 1 addition & 1 deletion

@@ -32,7 +32,7 @@ function make_distribution {
 function rename_dist {
     pushd "${SPARK_DIR}"

-    local VERSION=${GIT_BRANCH#refs/tags/custom-}
+    local VERSION=${GIT_BRANCH#origin/tags/custom-}

     # rename to spark-<tag>
     tar xvf spark-*.tgz
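Note: `${GIT_BRANCH#pattern}` strips the shortest matching prefix, and this job evidently receives tag builds as origin/tags/... (the sibling script above assumes the same), so the old refs/tags/custom- pattern never matched and VERSION silently kept the whole branch string. A quick illustration (tag value invented):

    GIT_BRANCH="origin/tags/custom-2.0.2"
    echo "${GIT_BRANCH#refs/tags/custom-}"     # no match -> origin/tags/custom-2.0.2
    echo "${GIT_BRANCH#origin/tags/custom-}"   # -> 2.0.2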

cli/dcos_spark/version.py

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-version = 'SNAPSHOT'
+version = '0.5.19'

conf/spark-env.sh

Lines changed: 11 additions & 2 deletions

@@ -7,14 +7,23 @@
 # moves those config files into the standard directory. In DCOS, the
 # CLI reads the "SPARK_HDFS_CONFIG_URL" marathon label in order to set
 # spark.mesos.uris
+
 mkdir -p "${HADOOP_CONF_DIR}"
 [ -f "${MESOS_SANDBOX}/hdfs-site.xml" ] && cp "${MESOS_SANDBOX}/hdfs-site.xml" "${HADOOP_CONF_DIR}"
 [ -f "${MESOS_SANDBOX}/core-site.xml" ] && cp "${MESOS_SANDBOX}/core-site.xml" "${HADOOP_CONF_DIR}"

-MESOS_NATIVE_JAVA_LIBRARY=/usr/local/lib/libmesos.so
+cd $MESOS_SANDBOX
+
+MESOS_NATIVE_JAVA_LIBRARY=/usr/lib/libmesos.so

 # Support environments without DNS
-SPARK_LOCAL_IP=${LIBPROCESS_IP}
+if [ -n "$LIBPROCESS_IP" ]; then
+    SPARK_LOCAL_IP=${LIBPROCESS_IP}
+fi
+
+# I first set this to MESOS_SANDBOX, as a workaround for MESOS-5866,
+# but this fails now due to MESOS-6391, so I'm setting it to /tmp.
+MESOS_DIRECTORY=/tmp

 # Options read when launching programs locally with
 # ./bin/run-example or ./bin/spark-submit
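Note: the -n guard matters because the old unconditional assignment set SPARK_LOCAL_IP to an empty string whenever LIBPROCESS_IP was absent, and a set-but-empty value can be mistaken downstream for a real bind address. A sketch of the two behaviors:

    unset LIBPROCESS_IP
    SPARK_LOCAL_IP=${LIBPROCESS_IP}            # old behavior: set, but empty
    echo "old: [${SPARK_LOCAL_IP}]"            # -> old: []

    unset SPARK_LOCAL_IP
    if [ -n "$LIBPROCESS_IP" ]; then           # new behavior: assign only when non-empty
        SPARK_LOCAL_IP=${LIBPROCESS_IP}
    fi
    echo "new: [${SPARK_LOCAL_IP:-unset}]"     # -> new: [unset]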

docker/Dockerfile

Lines changed: 16 additions & 6 deletions

@@ -18,7 +18,7 @@
 # docker build -t spark:git-`git rev-parse --short HEAD` .

 # Basing from Mesos image so the Mesos native library is present.
-FROM mesosphere/mesos-modules-private:git-e348e3f
+FROM mesosphere/mesos-modules-private:dcos-ee-mesos-modules-1.8.5-rc2
 MAINTAINER Michael Gummelt <[email protected]>

 # Set environment variables.
@@ -27,24 +27,25 @@ ENV DEBCONF_NONINTERACTIVE_SEEN "true"

 # Upgrade package index and install basic commands.
 RUN apt-get update && \
-    apt-get install -y software-properties-common runit nginx
+    apt-get install -y \
+        software-properties-common \
+        runit \
+        nginx
+
 RUN add-apt-repository ppa:openjdk-r/ppa
 RUN apt-get update && \
     apt-get install -y openjdk-8-jdk curl

 ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
-ENV MESOS_NATIVE_JAVA_LIBRARY /usr/local/lib/libmesos.so
+ENV MESOS_NATIVE_JAVA_LIBRARY /usr/lib/libmesos.so
 ENV HADOOP_CONF_DIR /etc/hadoop

 RUN mkdir /etc/hadoop

-ADD dist /opt/spark/dist
 ADD runit/service /var/lib/runit/service
 ADD runit/init.sh /sbin/init.sh
 ADD nginx /etc/nginx

-#RUN ln -sf /usr/lib/libmesos.so /usr/lib/libmesos-0.23.1.so
-
 # The following symlinks are hacks to make spark-class work under the
 # restricted PATH (/usr/bin) set by the DCOS
 # --executor-environment-variables option
@@ -55,4 +56,13 @@ RUN ln -s /bin/grep /usr/bin/grep
 RUN ln -s /var/lib/runit/service/spark /etc/service/spark
 RUN ln -s /var/lib/runit/service/nginx /etc/service/nginx

+RUN chmod -R ugo+rw /etc/nginx
+RUN chmod -R ugo+rw /etc/service
+RUN chmod -R ugo+rw /var/lib/
+RUN chmod -R ugo+rw /var/run/
+RUN chmod -R ugo+rw /var/log/
+
+ADD dist /opt/spark/dist
+RUN chmod -R ugo+rw /opt/spark/dist
+
 WORKDIR /opt/spark/dist
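Note: the chmod layers open up the directories the dispatcher and nginx touch at runtime, so the container can also run as a non-root user; moving ADD dist below them means a rebuilt Spark dist only invalidates the final two layers of the image cache rather than the permission fix-ups. A hypothetical smoke test (image tag invented for illustration):

    docker build -t spark:perm-test .
    docker run --rm --user 65534:65534 spark:perm-test \
        sh -c 'touch /var/log/probe && echo "/var/log is writable"'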

docker/runit/service/spark/run

Lines changed: 68 additions & 49 deletions

@@ -4,64 +4,83 @@ set -x

 exec 2>&1

-export APPLICATION_WEB_PROXY_BASE="${DISPATCHER_UI_WEB_PROXY_BASE}"
-
-cd /opt/spark/dist
-
-export SPARK_DAEMON_JAVA_OPTS=""
-if [ "${DCOS_SERVICE_NAME}" != "spark" ]; then
-    export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.deploy.zookeeper.dir=/spark_mesos_dispatcher_${DCOS_SERVICE_NAME}"
-fi
-
-if [ "$SPARK_DISPATCHER_MESOS_ROLE" != "" ]; then
-    export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.mesos.role=$SPARK_DISPATCHER_MESOS_ROLE"
-fi
-
-if [ "$SPARK_DISPATCHER_MESOS_PRINCIPAL" != "" ]; then
-    export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.mesos.principal=$SPARK_DISPATCHER_MESOS_PRINCIPAL"
-fi
-
-if [ "$SPARK_DISPATCHER_MESOS_SECRET" != "" ]; then
-    export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.mesos.secret=$SPARK_DISPATCHER_MESOS_SECRET"
-fi
-
-
-
-HISTORY_SERVER_CONF=""
-if [ "${ENABLE_HISTORY_SERVER:=false}" = "true" ]; then
-    HISTORY_SERVER_CONF="spark.mesos.historyServer.url=${HISTORY_SERVER_WEB_PROXY_BASE}"
-fi
-
-sed "s,<HISTORY_SERVER_CONF>,${HISTORY_SERVER_CONF}," \
-    conf/mesos-cluster-dispatcher.properties.template >conf/mesos-cluster-dispatcher.properties
+function export_daemon_opts() {
+    export SPARK_DAEMON_JAVA_OPTS=""
+    if [ "${DCOS_SERVICE_NAME}" != "spark" ]; then
+        export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.deploy.zookeeper.dir=/spark_mesos_dispatcher_${DCOS_SERVICE_NAME}"
+    fi
+
+    if [ "$SPARK_DISPATCHER_MESOS_ROLE" != "" ]; then
+        export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.mesos.role=$SPARK_DISPATCHER_MESOS_ROLE"
+    fi
+
+    if [ "$SPARK_DISPATCHER_MESOS_PRINCIPAL" != "" ]; then
+        export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.mesos.principal=$SPARK_DISPATCHER_MESOS_PRINCIPAL"
+    fi
+
+    if [ "$SPARK_DISPATCHER_MESOS_SECRET" != "" ]; then
+        export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.mesos.secret=$SPARK_DISPATCHER_MESOS_SECRET"
+    fi
+}

-sed "s,<LOG_LEVEL>,${SPARK_LOG_LEVEL}," \
-    conf/log4j.properties.template >conf/log4j.properties
+function set_log_level() {
+    sed "s,<LOG_LEVEL>,${SPARK_LOG_LEVEL}," \
+        /opt/spark/dist/conf/log4j.properties.template >/opt/spark/dist/conf/log4j.properties
+}

 function add_if_non_empty() {
     if [ -n "$2" ]; then
-        echo "$1=$2" >> conf/mesos-cluster-dispatcher.properties
+        echo "$1=$2" >> /opt/spark/dist/conf/mesos-cluster-dispatcher.properties
     fi
 }

-if [ "${SPARK_SSL_KEYSTOREBASE64}" != "" ]; then
-    echo "${SPARK_SSL_KEYSTOREBASE64}" | base64 -d > /tmp/dispatcher-keystore.jks
-    add_if_non_empty spark.ssl.keyStore /tmp/dispatcher-keystore.jks
-fi
+function configure_properties() {
+    HISTORY_SERVER_CONF=""
+    if [ "${ENABLE_HISTORY_SERVER:=false}" = "true" ]; then
+        HISTORY_SERVER_CONF="spark.mesos.historyServer.url=${HISTORY_SERVER_WEB_PROXY_BASE}"
+    fi
+
+    sed "s,<HISTORY_SERVER_CONF>,${HISTORY_SERVER_CONF}," \
+        /opt/spark/dist/conf/mesos-cluster-dispatcher.properties.template >/opt/spark/dist/conf/mesos-cluster-dispatcher.properties
+
+    if [ "${SPARK_SSL_KEYSTOREBASE64}" != "" ]; then
+        echo "${SPARK_SSL_KEYSTOREBASE64}" | base64 -d > /tmp/dispatcher-keystore.jks
+        add_if_non_empty spark.ssl.keyStore /tmp/dispatcher-keystore.jks
+    fi
+
+    if [ "${SPARK_SSL_TRUSTSTOREBASE64}" != "" ]; then
+        echo "${SPARK_SSL_TRUSTSTOREBASE64}" | base64 -d > /tmp/dispatcher-truststore.jks
+        add_if_non_empty spark.ssl.trustStore /tmp/dispatcher-truststore.jks
+    fi
+
+    add_if_non_empty spark.ssl.enabled "${SPARK_SSL_ENABLED}"
+    add_if_non_empty spark.ssl.keyPassword "${SPARK_SSL_KEYPASSWORD}"
+    add_if_non_empty spark.ssl.keyStorePassword "${SPARK_SSL_KEYSTOREPASSWORD}"
+    add_if_non_empty spark.ssl.trustStorePassword "${SPARK_SSL_TRUSTSTOREPASSWORD}"
+    add_if_non_empty spark.ssl.protocol "${SPARK_SSL_PROTOCOL}"
+    add_if_non_empty spark.ssl.enabledAlgorithms "${SPARK_SSL_ENABLEDALGORITHMS}"
+
+    # write defaults
+    if [ "${DCOS_SERVICE_ACCOUNT_CREDENTIAL}" != "" ]; then
+        # write defaults using both property names, since 2.0 uses one and 2.1 uses the other
+        echo "spark.mesos.dispatcher.driverDefault.spark.mesos.driverEnv.MESOS_MODULES=file:///opt/mesosphere/etc/mesos-scheduler-modules/dcos_authenticatee_module.json" >> /opt/spark/dist/conf/mesos-cluster-dispatcher.properties
+        echo "spark.mesos.cluster.taskProperty.spark.mesos.driverEnv.MESOS_MODULES=file:///opt/mesosphere/etc/mesos-scheduler-modules/dcos_authenticatee_module.json" >> /opt/spark/dist/conf/mesos-cluster-dispatcher.properties
+
+        echo "spark.mesos.dispatcher.driverDefault.spark.mesos.driverEnv.MESOS_AUTHENTICATEE=com_mesosphere_dcos_ClassicRPCAuthenticatee" >> /opt/spark/dist/conf/mesos-cluster-dispatcher.properties
+        echo "spark.mesos.cluster.taskProperty.spark.mesos.driverEnv.MESOS_AUTHENTICATEE=com_mesosphere_dcos_ClassicRPCAuthenticatee" >> /opt/spark/dist/conf/mesos-cluster-dispatcher.properties
+
+        echo "spark.mesos.dispatcher.driverDefault.spark.mesos.principal=${SPARK_DISPATCHER_MESOS_PRINCIPAL}" >> /opt/spark/dist/conf/mesos-cluster-dispatcher.properties
+        echo "spark.mesos.cluster.taskProperty.spark.mesos.principal=${SPARK_DISPATCHER_MESOS_PRINCIPAL}" >> /opt/spark/dist/conf/mesos-cluster-dispatcher.properties
+    fi
+}

-if [ "${SPARK_SSL_TRUSTSTOREBASE64}" != "" ]; then
-    echo "${SPARK_SSL_TRUSTSTOREBASE64}" | base64 -d > /tmp/dispatcher-truststore.jks
-    add_if_non_empty spark.ssl.trustStore /tmp/dispatcher-truststore.jks
-fi

-add_if_non_empty spark.ssl.enabled "${SPARK_SSL_ENABLED}"
-add_if_non_empty spark.ssl.keyPassword "${SPARK_SSL_KEYPASSWORD}"
-add_if_non_empty spark.ssl.keyStorePassword "${SPARK_SSL_KEYSTOREPASSWORD}"
-add_if_non_empty spark.ssl.trustStorePassword "${SPARK_SSL_TRUSTSTOREPASSWORD}"
-add_if_non_empty spark.ssl.protocol "${SPARK_SSL_PROTOCOL}"
-add_if_non_empty spark.ssl.enabledAlgorithms "${SPARK_SSL_ENABLEDALGORITHMS}"
+export APPLICATION_WEB_PROXY_BASE="${DISPATCHER_UI_WEB_PROXY_BASE}"
+set_log_level
+export_daemon_opts
+configure_properties
+ZK="master.mesos:2181"

-export ZK="master.mesos:2181"
 exec /opt/spark/dist/bin/spark-class \
     org.apache.spark.deploy.mesos.MesosClusterDispatcher \
     --port "${DISPATCHER_PORT}" \
@@ -70,4 +89,4 @@ exec /opt/spark/dist/bin/spark-class \
     --zk "${ZK}" \
     --host "${HOST}" \
     --name "${DCOS_SERVICE_NAME}" \
-    --properties-file "conf/mesos-cluster-dispatcher.properties"
+    --properties-file "/opt/spark/dist/conf/mesos-cluster-dispatcher.properties"
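Note: the rewrite folds the former top-level statements into export_daemon_opts, set_log_level, and configure_properties, and replaces every relative conf/ path with an absolute /opt/spark/dist path now that the `cd /opt/spark/dist` is gone. The add_if_non_empty helper keeps unset options out of the properties file entirely; a short sketch of its contract (values invented):

    SPARK_SSL_ENABLED=true
    SPARK_SSL_PROTOCOL=""
    add_if_non_empty spark.ssl.enabled "${SPARK_SSL_ENABLED}"     # appends "spark.ssl.enabled=true"
    add_if_non_empty spark.ssl.protocol "${SPARK_SSL_PROTOCOL}"   # -n test fails; appends nothing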
