
Commit e804624

Merge pull request apache#50 from mesosphere/new-cosmos

new cosmos

2 parents: 2ff4a75 + eabdce4

File tree: 7 files changed (+39 / -30 lines)

bin/jenkins-dist-test.sh

Lines changed: 2 additions & 2 deletions

@@ -1,7 +1,7 @@
 #!/bin/bash
 
-export S3_BUCKET=spark-build
-export S3_PREFIX=
+export S3_BUCKET=infinity-artifacts
+export S3_PREFIX=spark
 export AWS_ACCESS_KEY_ID=${DEV_AWS_ACCESS_KEY_ID}
 export AWS_SECRET_ACCESS_KEY=${DEV_SECRET_ACCESS_KEY}
 export DOCKER_IMAGE=mesosphere/spark-dev:${GIT_COMMIT}
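Note: a sketch of how these two variables combine downstream, following the pattern in the (now commented-out) upload in bin/test.sh below; BUILD_ID is a Jenkins-provided value, and nothing here is introduced by this change:

# With the new values the destination becomes:
#   s3://infinity-artifacts/sparkspark-universe-<BUILD_ID>.zip
# (the prefix is concatenated with no separator)
S3_FILENAME="${S3_PREFIX}spark-universe-${BUILD_ID}.zip"
aws s3 cp ./build/spark-universe.zip "s3://${S3_BUCKET}/${S3_FILENAME}" --acl public-read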

bin/jenkins-package-publish.sh

Lines changed: 2 additions & 2 deletions

@@ -1,7 +1,7 @@
 #!/bin/bash
 
-export S3_BUCKET=spark-build
-export S3_PREFIX=
+export S3_BUCKET=infinity-artifacts
+export S3_PREFIX=spark
 export DOCKER_IMAGE=mesosphere/spark:${GIT_BRANCH#refs/tags/}
 
 source spark-build/bin/jenkins.sh
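Note: the image tag uses bash's "remove prefix" parameter expansion; a minimal standalone illustration (the tag value here is hypothetical):

GIT_BRANCH=refs/tags/1.6.1-2
echo "${GIT_BRANCH#refs/tags/}"   # prints: 1.6.1-2
# so a tag build publishes mesosphere/spark:1.6.1-2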

bin/jenkins-package-test.sh

Lines changed: 2 additions & 2 deletions

@@ -1,7 +1,7 @@
 #!/bin/bash
 
-export S3_BUCKET=spark-build
-export S3_PREFIX=
+export S3_BUCKET=infinity-artifacts
+export S3_PREFIX=spark
 export DOCKER_IMAGE=mesosphere/spark-dev:${GIT_COMMIT}
 
 source spark-build/bin/jenkins.sh

bin/jenkins.sh

Lines changed: 3 additions & 0 deletions

@@ -72,6 +72,9 @@ function install_cli {
 
     # hack because the installer forces an old CLI version
     pip install -U dcoscli
+
+    # needed in `make test`
+    pip3 install jsonschema
 }
 
 function docker_login {
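Note: a quick sanity check (not part of the change) that the new dependency is visible to the interpreter `make test` will use:

pip3 install jsonschema
python3 -c "import jsonschema"   # exits 0 if the module is importable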

bin/make-universe.sh

Lines changed: 18 additions & 17 deletions

@@ -1,35 +1,36 @@
 #!/usr/bin/env bash
 
-# creates
-# - build/spark-universe
-# - build/spark-universe.zip
+# 1) publishes universe docker image at DOCKER_IMAGE:DOCKER_TAG
+# 2) creates universe app at ./build/spark-universe/docker/server/target/marathon.json
 
 set -x -e
 
 rm -rf build/spark-universe*
 
-# make spark package
-# TODO(mgummelt): remove this after some time
-# ./bin/make-package.py
-
 # download universe
-wget -O build/spark-universe.zip https://github.com/mesosphere/universe/archive/version-2.x.zip
+UNIVERSE_BRANCH=version-3.x
+wget -O build/spark-universe.zip "https://github.com/mesosphere/universe/archive/${UNIVERSE_BRANCH}.zip"
 unzip -d build build/spark-universe.zip
-mv build/universe-version-2.x build/spark-universe
+mv "build/universe-${UNIVERSE_BRANCH}" build/spark-universe
 rm build/spark-universe.zip
 
 # make new universe
 SPARK_DIR=build/spark-universe/repo/packages/S/spark
-rm -rf ${SPARK_DIR}/*
-cp -r build/package ${SPARK_DIR}/0
+rm -rf "${SPARK_DIR}"/*
+cp -r build/package "${SPARK_DIR}/0"
 
+# build universe docker image
 pushd build/spark-universe
 ./scripts/build.sh
+DOCKER_TAG=spark-$(openssl rand -hex 8)
+DOCKER_TAG="${DOCKER_TAG}" ./docker/server/build.bash
+DOCKER_TAG="${DOCKER_TAG}" ./docker/server/build.bash publish
 popd
 
-if [ -x "$(command -v zip)" ]; then
-    (cd build && zip -r spark-universe.zip spark-universe)
-else
-    # TODO: remove the docker wrapper once `zip` is available on TC
-    docker run -v $(pwd)/build/:/build/ ubuntu:latest sh -c "apt-get update && apt-get install -y zip && cd /build/ && zip -r spark-universe.zip spark-universe"
-fi
+
+# if [ -x "$(command -v zip)" ]; then
+#     (cd build && zip -r spark-universe.zip spark-universe)
+# else
+#     # TODO: remove the docker wrapper once `zip` is available on TC
+#     docker run -v $(pwd)/build/:/build/ ubuntu:latest sh -c "apt-get update && apt-get install -y zip && cd /build/ && zip -r spark-universe.zip spark-universe"
+# fi
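Note: `openssl rand -hex 8` emits 8 random bytes as 16 hex characters, so each run publishes under a fresh tag; an illustration (the output shown is made up):

$ openssl rand -hex 8
3f9c2a71d04be815
# => DOCKER_TAG=spark-3f9c2a71d04be815, avoiding tag collisions between builds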

bin/test.sh

Lines changed: 12 additions & 6 deletions

@@ -30,16 +30,17 @@ set -o pipefail
 
 
 build_universe() {
-    ./bin/make-package.py
+    make package
     (cd build && tar czf package.tgz package)
-    ./bin/make-universe.sh
+
+    # temporarily unset DOCKER_IMAGE so it doesn't conflict with universe's build.bash
+    (unset DOCKER_IMAGE && make universe)
 }
 
 start_cluster() {
     if [ -z "${DCOS_URL}" ]; then
         DCOS_URL=http://$(./bin/launch-cluster.sh)
     fi
-    #TOKEN=$(python -c "import requests;js={'token':'"${DCOS_OAUTH_TOKEN}"'};r=requests.post('"${DCOS_URL}"/acs/api/v1/auth/login',json=js);print(r.json()['token'])")
     TOKEN=$(python -c "import requests;js={'uid':'"${DCOS_USERNAME}"', 'password': '"${DCOS_PASSWORD}"'};r=requests.post('"${DCOS_URL}"/acs/api/v1/auth/login',json=js);print(r.json()['token'])")
     dcos config set core.dcos_acs_token ${TOKEN}
 }
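Note: the surviving TOKEN one-liner logs in with uid/password rather than an OAuth token. An easier-to-read heredoc equivalent (a sketch; behavior is the same, since the unquoted delimiter lets the shell expand the variables before python runs):

TOKEN=$(python - <<EOF
import requests
js = {'uid': '${DCOS_USERNAME}', 'password': '${DCOS_PASSWORD}'}
r = requests.post('${DCOS_URL}/acs/api/v1/auth/login', json=js)
print(r.json()['token'])
EOF
)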
@@ -48,9 +49,14 @@ configure_cli() {
     dcos config set core.dcos_url "${DCOS_URL}"
 
     # add universe
-    local S3_FILENAME="${S3_PREFIX}spark-universe-${BUILD_ID}.zip"
-    aws s3 cp ./build/spark-universe.zip "s3://${S3_BUCKET}/${S3_FILENAME}" --acl public-read
-    dcos package repo add --index=0 spark-test "http://${S3_BUCKET}.s3.amazonaws.com/${S3_FILENAME}"
+    # local S3_FILENAME="${S3_PREFIX}spark-universe-${BUILD_ID}.zip"
+    # aws s3 cp ./build/spark-universe.zip "s3://${S3_BUCKET}/${S3_FILENAME}" --acl public-read
+    # dcos package repo add --index=0 spark-test "http://${S3_BUCKET}.s3.amazonaws.com/${S3_FILENAME}"
+    dcos marathon app add ./build/spark-universe/docker/server/target/marathon.json
+    dcos package repo add --index=0 spark-test http://universe.marathon.mesos:8085/repo-1.7
+
+    # wait for universe server to come up
+    sleep 45
 }
 
 install_spark() {
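Note: the fixed `sleep 45` is a blunt wait; a polling alternative (a sketch, not part of the commit; the URL matches the repo endpoint added above):

# poll the universe server until it answers instead of sleeping a fixed 45s
until curl -sf http://universe.marathon.mesos:8085/repo-1.7 > /dev/null; do
    sleep 5
done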

docker/Dockerfile

Lines changed: 0 additions & 1 deletion

@@ -57,4 +57,3 @@ RUN ln -s /var/lib/runit/service/spark /etc/service/spark
 RUN ln -s /var/lib/runit/service/nginx /etc/service/nginx
 
 WORKDIR /opt/spark/dist
-