Commit b6b48d0

skonto authored and Arthur Rand committed
improvements build (apache#204)
1 parent 3ba5002 commit b6b48d0

7 files changed: +20 additions, -19 deletions

Makefile

Lines changed: 14 additions & 13 deletions
@@ -5,32 +5,33 @@ CLI_DIST_DIR := $(BUILD_DIR)/cli_dist
 DIST_DIR := $(BUILD_DIR)/dist
 GIT_COMMIT := $(shell git rev-parse HEAD)
 
-S3_BUCKET := infinity-artifacts
-S3_PREFIX := autodelete7d
+S3_BUCKET ?= infinity-artifacts
+S3_PREFIX ?= autodelete7d
+SPARK_REPO_URL ?= https://github.com/mesosphere/spark
 
 .ONESHELL:
 SHELL := /bin/bash
 .SHELLFLAGS = -ec
 
 # This image can be used to build spark dist and run tests
-DOCKER_BUILD_IMAGE := mesosphere/spark-build:$(GIT_COMMIT)
+DOCKER_BUILD_IMAGE ?= mesosphere/spark-build:$(GIT_COMMIT)
 docker-build:
 	docker build -t $(DOCKER_BUILD_IMAGE) .
 	echo $(DOCKER_BUILD_IMAGE) > $@
 
 # Pulls the spark distribution listed in the manifest as default
-SPARK_DIST_URI := $(shell jq ".default_spark_dist.uri" "$(ROOT_DIR)/manifest.json")
+SPARK_DIST_URI ?= $(shell jq ".default_spark_dist.uri" "$(ROOT_DIR)/manifest.json")
 manifest-dist:
 	mkdir -p $(DIST_DIR)
 	pushd $(DIST_DIR)
 	wget $(SPARK_DIST_URI)
 	popd
 
-HADOOP_VERSION := $(shell jq ".default_spark_dist.hadoop_version" "$(ROOT_DIR)/manifest.json")
+HADOOP_VERSION ?= $(shell jq ".default_spark_dist.hadoop_version" "$(ROOT_DIR)/manifest.json")
 
-SPARK_DIR := $(ROOT_DIR)/spark
+SPARK_DIR ?= $(ROOT_DIR)/spark
 $(SPARK_DIR):
-	git clone https://github.com/mesosphere/spark $(SPARK_DIR)
+	git clone $(SPARK_REPO_URL) $(SPARK_DIR)
 
 # Builds a quick dev version of spark from the mesosphere fork
 dev-dist: $(SPARK_DIR)

@@ -85,7 +86,7 @@ clean-dist:
 docker-login:
 	docker login --email="$(DOCKER_EMAIL)" --username="$(DOCKER_USERNAME)" --password="$(DOCKER_PASSWORD)"
 
-DOCKER_DIST_IMAGE := mesosphere/spark-dev:$(GIT_COMMIT)
+DOCKER_DIST_IMAGE ?= mesosphere/spark-dev:$(GIT_COMMIT)
 docker-dist: $(DIST_DIR)
 	tar xvf $(DIST_DIR)/spark-*.tgz -C $(DIST_DIR)
 	rm -rf $(BUILD_DIR)/docker

@@ -99,7 +100,7 @@ docker-dist: $(DIST_DIR)
 	docker push $(DOCKER_DIST_IMAGE)
 	echo "$(DOCKER_DIST_IMAGE)" > $@
 
-CLI_VERSION := $(shell jq -r ".cli_version" "$(ROOT_DIR)/manifest.json")
+CLI_VERSION ?= $(shell jq -r ".cli_version" "$(ROOT_DIR)/manifest.json")
 $(CLI_DIST_DIR):
 	$(MAKE) --directory=cli all
 	mkdir -p $@

@@ -110,7 +111,7 @@ $(CLI_DIST_DIR):
 
 cli: $(CLI_DIST_DIR)
 
-UNIVERSE_URL_PATH := stub-universe-url
+UNIVERSE_URL_PATH ?= stub-universe-url
 $(UNIVERSE_URL_PATH): $(CLI_DIST_DIR) docker-dist
 	UNIVERSE_URL_PATH=$(UNIVERSE_URL_PATH) \
 	TEMPLATE_CLI_VERSION=$(CLI_VERSION) \

@@ -123,7 +124,7 @@ $(UNIVERSE_URL_PATH): $(CLI_DIST_DIR) docker-dist
 		$(CLI_DIST_DIR)/dcos-spark.exe \
 		$(CLI_DIST_DIR)/*.whl;
 
-DCOS_SPARK_TEST_JAR_PATH := $(ROOT_DIR)/dcos-spark-scala-tests-assembly-0.1-SNAPSHOT.jar
+DCOS_SPARK_TEST_JAR_PATH ?= $(ROOT_DIR)/dcos-spark-scala-tests-assembly-0.1-SNAPSHOT.jar
 $(DCOS_SPARK_TEST_JAR_PATH):
 	cd tests/jobs/scala
 	sbt assembly

@@ -154,15 +155,15 @@ clean-cluster:
 mesos-spark-integration-tests:
 	git clone https://github.com/typesafehub/mesos-spark-integration-tests $(ROOT_DIR)/mesos-spark-integration-tests
 
-MESOS_SPARK_TEST_JAR_PATH := $(ROOT_DIR)/mesos-spark-integration-tests-assembly-0.1.0.jar
+MESOS_SPARK_TEST_JAR_PATH ?= $(ROOT_DIR)/mesos-spark-integration-tests-assembly-0.1.0.jar
 $(MESOS_SPARK_TEST_JAR_PATH): mesos-spark-integration-tests
 	cd $(ROOT_DIR)/mesos-spark-integration-tests/test-runner
 	sbt assembly
 	cd ..
 	sbt clean compile test
 	cp test-runner/target/scala-2.11/mesos-spark-integration-tests-assembly-0.1.0.jar $(MESOS_SPARK_TEST_JAR_PATH)
 
-PYTEST_ARGS := -s -vv -m sanity
+PYTEST_ARGS ?= -s -vv -m sanity
 test: test-env $(DCOS_SPARK_TEST_JAR_PATH) $(MESOS_SPARK_TEST_JAR_PATH) $(UNIVERSE_URL_PATH) cluster-url
 	source $(ROOT_DIR)/test-env/bin/activate
 	if [ -z $(CLUSTER_URL) ]; then \
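
Note on the change: switching these Makefile variables from := to ?= means each one only takes its default when nothing else has defined it, so a value exported by a CI job or a developer shell is respected instead of being overwritten. A minimal usage sketch, with placeholder values that are not part of this repo:

# With ?=, an exported value wins over the Makefile default (with := the
# Makefile default would overwrite it). The bucket name, fork URL, and image
# tags below are placeholders.
export S3_BUCKET=my-scratch-bucket
export SPARK_REPO_URL=https://github.com/example-user/spark
make dev-dist

# Command-line assignments also work; they override ordinary Makefile
# assignments regardless of := or ?=.
make docker-dist DOCKER_DIST_IMAGE=example-registry/spark-dev:test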

tools/build_cli.sh

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 # exit immediately on failure
 set -e

tools/build_framework.sh

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 # Prevent jenkins from immediately killing the script when a step fails, allowing us to notify github:
 set +e
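
The set +e context line above (with its Jenkins comment) disables bash's exit-on-error mode so the script can report a failed step before exiting. A minimal sketch of that pattern, using hypothetical step and notification commands rather than the repo's actual ones:

# Keep running after a failure, remember the exit code, report it, then exit with it.
set +e
./run_build_step.sh                         # hypothetical build step
EXIT_CODE=$?
echo "build step exited with $EXIT_CODE"    # stand-in for the GitHub notification
exit $EXIT_CODE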

tools/build_publishable.sh

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 set -e
 source ./tools/init_paths.sh

tools/release.sh

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 #script to publish the tools dir to s3 for use in other jobs
 
tools/release_artifacts.sh

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 set -e
 source ${TOOLS_DIR}/init_paths.sh

tools/setup_permissions.sh

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 LINUX_USER=$1
 ROLE=$2
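
Note on the shebang change applied across these scripts: #!/bin/bash runs the interpreter at that exact path, while #!/usr/bin/env bash asks env(1) to look bash up on PATH, so the scripts also work on systems where bash is installed somewhere else (for example a newer Homebrew bash on macOS). A quick, illustrative way to check which bash would be picked up on a given machine:

# Resolve bash the same way `env bash` would, via PATH lookup.
command -v bash
# Print the version of that bash (output varies by machine).
bash --version | head -1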
