Commit 8781f68

Merge pull request apache#58 from mesosphere/merge-cli-use-stub-universe
Switch to stub universe in builds
2 parents: a0e2126 + 502210f

42 files changed: +1774 −228 lines

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -1,2 +1,3 @@
 build/
+dcos-commons-tools/
 tests/env

Makefile

Lines changed: 3 additions & 6 deletions
@@ -1,13 +1,10 @@
 docker:
 	bin/make-docker.sh
 
-package:
-	bin/make-package.py
-
-universe:
-	bin/make-universe.sh
+build:
+	bin/build.sh
 
 test:
 	bin/test.sh
 
-.PHONY: package docker universe test
+.PHONY: build test

README.md

Lines changed: 8 additions & 19 deletions
@@ -11,38 +11,27 @@ edit `manifest.json`.
 
 ## Push a docker image
 
-Build and push a docker image using the Spark distribution specified in `manifest.json`
+Build and push a docker image using the Spark distribution specified in `manifest.json`. This is also executed automatically via `make build`, below.
 
 ```
 DOCKER_IMAGE=<name> make docker
 ```
 
-## Create a package
+## Create a CLI package, docker image, and universe
 
-Write a package to `build/package`. Use the `DOCKER_IMAGE` name you
-created above.
+The `DOCKER_IMAGE` value may either be a provided custom value, or may be left unset to automatically use the current git commit SHA. The created universe and CLI will be uploaded to a default S3 bucket, or may be customized as in the example below:
 
 ```
-DOCKER_IMAGE=<name> make package
+DOCKER_IMAGE=<name> \
+S3_BUCKET=<your-bucket> \
+S3_DIR_PATH=<base-path-in-bucket> \
+make build
 ```
 
-## Create a universe
-
-Write a universe to `build/spark-universe`. You can then upload this to
-e.g. S3, and point your DC/OS cluster at it via `dcos package repo
-add`.
-
-```
-make universe
-```
-
-
 ## Test
 
 ```
 make test
 ```
 
-This requires several env variables, and is primarily used in CI. It
-calls `make package` and `make universe`. Read the comment at the top
-of the file for a complete description.
+This requires several env variables, and is primarily used in CI. Read the comment at the top of the file for a complete description.
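
For reference, once `make build` has uploaded a stub universe zip, a cluster can consume it the same way the removed `make universe` instructions described, via `dcos package repo add`. A minimal sketch; the URL below is a placeholder standing in for whatever stub universe URL the build actually reports:

```
# Placeholder URL; use the stub universe URL printed by `make build`.
dcos package repo add --index=0 spark-stub \
  https://<your-bucket>.s3.amazonaws.com/<base-path-in-bucket>/stub-universe-spark.zip
dcos package install spark
```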

bin/build.sh

Lines changed: 108 additions & 0 deletions
@@ -0,0 +1,108 @@
+#!/usr/bin/env bash
+
+# Builds and uploads:
+# - CLI to S3
+# - Spark docker image to dockerhub
+# - stub universe zip to S3
+#
+# Manifest config:
+# cli_version - version label to use for CLI package
+# spark_uri - where to fetch the spark distribution from (or SPARK_DIST_URI if provided)
+#
+# ENV vars:
+# COMMONS_TOOLS_DIR - path to dcos-commons/tools/, or empty to fetch latest release tgz
+# DOCKER_IMAGE - "<image>:<version>", falls back to mesosphere/spark-dev:COMMIT
+# ghprbActualCommit / GIT_COMMIT - COMMIT value to use for DOCKER_IMAGE, if DOCKER_IMAGE isn't specified
+
+BIN_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+BASEDIR="${BIN_DIR}/.."
+
+configure_env() {
+    if [ -z "${SPARK_DIST_URI}" ]; then
+        SPARK_DIST_URI=$(cat $BASEDIR/manifest.json | jq .spark_uri)
+        SPARK_DIST_URI="${SPARK_DIST_URI%\"}"
+        SPARK_DIST_URI="${SPARK_DIST_URI#\"}"
+        echo "Using Spark dist URI: $SPARK_DIST_URI"
+    fi
+
+    if [ -z "${CLI_VERSION}" ]; then
+        CLI_VERSION=$(cat $BASEDIR/manifest.json | jq .cli_version)
+        CLI_VERSION="${CLI_VERSION%\"}"
+        CLI_VERSION="${CLI_VERSION#\"}"
+        echo "Using CLI Version: $CLI_VERSION"
+    fi
+
+    if [ -z "$DOCKER_IMAGE" ]; then
+        # determine image label based on git commit:
+        if [ -n "$ghprbActualCommit" ]; then
+            # always overrides default GIT_COMMIT:
+            GIT_COMMIT=$ghprbActualCommit
+        fi
+        if [ -z "$GIT_COMMIT" ]; then
+            # Commit not explicitly provided by CI. Fetch directly from Git:
+            GIT_COMMIT="$(git rev-parse HEAD)"
+        fi
+        if [ -z "$GIT_COMMIT" ]; then
+            echo "Unable to determine git commit. Giving up."
+            exit 1
+        fi
+        DOCKER_IMAGE="mesosphere/spark-dev:$GIT_COMMIT"
+        echo "Using Docker image: $DOCKER_IMAGE"
+    fi
+}
+
+fetch_commons_tools() {
+    if [ -z "${COMMONS_TOOLS_DIR}" ]; then
+        pushd ${BIN_DIR}
+        rm -rf dcos-commons-tools/ && curl https://infinity-artifacts.s3.amazonaws.com/dcos-commons-tools.tgz | tar xz
+        popd
+        export COMMONS_TOOLS_DIR=${BIN_DIR}/dcos-commons-tools/
+    fi
+}
+
+notify_github() {
+    ${COMMONS_TOOLS_DIR}/github_update.py $1 build $2
+}
+
+build_cli() {
+    notify_github pending "Building CLI"
+    CLI_VERSION=$CLI_VERSION make --directory=$BASEDIR/cli env test packages
+    if [ $? -ne 0 ]; then
+        notify_github failure "CLI build failed"
+        exit 1
+    fi
+}
+
+build_push_docker() {
+    echo "###"
+    echo "# Using docker image: $DOCKER_IMAGE"
+    echo "###"
+    notify_github pending "Building Docker: $DOCKER_IMAGE"
+    $BIN_DIR/make-docker.sh
+    if [ $? -ne 0 ]; then
+        notify_github failure "Docker build failed"
+        exit 1
+    fi
+}
+
+upload_cli_and_stub_universe() {
+    # Build/upload package using custom template parameters: TEMPLATE_X_Y_Z => {{x-y-z}}
+    TEMPLATE_SPARK_DIST_URI=${SPARK_DIST_URI} \
+    TEMPLATE_DOCKER_IMAGE=${DOCKER_IMAGE} \
+    TEMPLATE_CLI_VERSION=${CLI_VERSION} \
+    ${COMMONS_TOOLS_DIR}/ci_upload.py \
+        spark \
+        ${BASEDIR}/package/ \
+        ${BASEDIR}/cli/dist/*.whl
+}
+
+# set CLI_VERSION, SPARK_DIST_URI, and DOCKER_IMAGE:
+configure_env
+
+fetch_commons_tools
+
+build_cli
+build_push_docker
+notify_github success "Build succeeded"
+
+upload_cli_and_stub_universe
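
A minimal local-invocation sketch for the new `bin/build.sh`, based only on the env vars documented in its header comment; the image tag and tools path are illustrative placeholders, and docker login plus S3 credentials are assumed to already be configured:

```
# Both values below are placeholders. If DOCKER_IMAGE is unset, the script
# falls back to mesosphere/spark-dev:<git commit SHA>; if COMMONS_TOOLS_DIR is
# unset, it fetches the latest dcos-commons-tools release tgz.
DOCKER_IMAGE=myorg/spark-dev:local-test \
COMMONS_TOOLS_DIR=/path/to/dcos-commons/tools/ \
  ./bin/build.sh
```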

bin/jenkins.sh

Lines changed: 9 additions & 2 deletions
@@ -53,17 +53,19 @@ function upload_to_s3 {
 function update_manifest {
   pushd spark-build
 
-  # update manifest.json
+  # update manifest.json with new spark dist:
   SPARK_DIST=$(ls ../spark/spark*.tgz)
   SPARK_URI="http://${S3_BUCKET}.s3.amazonaws.com/${S3_PREFIX}$(basename ${SPARK_DIST})"
   cat manifest.json | jq ".spark_uri=\"${SPARK_URI}\"" > manifest.json.tmp
   mv manifest.json.tmp manifest.json
 
+
   popd
 }
 
 function install_cli {
   curl -O https://downloads.mesosphere.io/dcos-cli/install.sh
+  rm -rf cli/
   mkdir cli
   bash install.sh cli http://change.me --add-path no
   source cli/bin/env-setup
@@ -84,7 +86,12 @@ function spark_test {
 
   pushd spark-build
   docker_login
-  make docker
+  # build/upload artifacts: docker + cli + stub universe:
+  make build
+  # in CI environments, ci_test.py creates a 'stub-universe.properties' file
+  # grab the STUB_UNIVERSE_URL from the file for use by test.sh:
+  export $(cat $WORKSPACE/stub-universe.properties)
+  # run tests against build artifacts:
   CLUSTER_NAME=spark-package-${BUILD_NUMBER} \
   TEST_DIR=$(pwd)/../mesos-spark-integration-tests/ \
   DCOS_CHANNEL=testing/master \
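
The `export $(cat $WORKSPACE/stub-universe.properties)` line above only works if the properties file holds plain KEY=VALUE pairs with no whitespace in the values. A hypothetical example of what the CI tooling is expected to write (the URL is illustrative, not a real artifact):

```
# $WORKSPACE/stub-universe.properties (hypothetical contents)
STUB_UNIVERSE_URL=https://<bucket>.s3.amazonaws.com/<path>/stub-universe-spark.zip
```

`bin/test.sh` can then pick up `STUB_UNIVERSE_URL` from the environment when running the tests against the uploaded stub universe.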

bin/launch-cluster.sh

Lines changed: 0 additions & 60 deletions
This file was deleted.

bin/make-docker.sh

Lines changed: 5 additions & 0 deletions
@@ -8,6 +8,11 @@
 
 set -x -e -o pipefail
 
+# The rest of this script currently assumes paths which are relative to the base repo dir:
+BIN_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+BASEDIR="${BIN_DIR}/.."
+cd $BASEDIR
+
 function fetch_spark() {
   mkdir -p build/dist
   [ -f "build/dist/${DIST_TGZ}" ] || curl -o "build/dist/${DIST_TGZ}" "${SPARK_DIST_URI}"

bin/make-package.py

Lines changed: 0 additions & 50 deletions
This file was deleted.

bin/make-universe.sh

Lines changed: 0 additions & 28 deletions
This file was deleted.
