# - Send output to stderr and have useful logging in stdout

# Note: The following variables must be set before use!
# ASF (Apache Software Foundation) account used for git pushes and Nexus uploads.
ASF_USERNAME=${ASF_USERNAME:-pwendell}
# NOTE(review): the "XXX" placeholders are meant to be overridden via the
# environment; export real credentials rather than editing this file.
ASF_PASSWORD=${ASF_PASSWORD:-XXX}
GPG_PASSPHRASE=${GPG_PASSPHRASE:-XXX}
GIT_BRANCH=${GIT_BRANCH:-branch-1.0}
# Version being released, and the development version the branch moves to next.
RELEASE_VERSION=${RELEASE_VERSION:-1.2.0}
NEXT_VERSION=${NEXT_VERSION:-1.2.1}
RC_NAME=${RC_NAME:-rc2}

# Local Maven repository locations: artifacts are `mvn install`ed here and
# then uploaded file-by-file to the Nexus staging repository.
M2_REPO=~/.m2/repository
SPARK_REPO=$M2_REPO/org/apache/spark
# Apache Nexus staging endpoints (Sonatype staging REST API).
NEXUS_ROOT=https://repository.apache.org/service/local/staging
NEXUS_UPLOAD=$NEXUS_ROOT/deploy/maven2
NEXUS_PROFILE=d63f592e7eac0 # Profile for Spark staging uploads
if [ -z " $JAVA_HOME " ]; then
40
46
echo " Error: JAVA_HOME is not set, cannot proceed."
@@ -47,31 +53,90 @@ set -e
# Tag to create for this release candidate, e.g. "v1.2.0-rc2".
GIT_TAG=v$RELEASE_VERSION-$RC_NAME
# Unless invoked with --package-only: coin the release commit/tag, build the
# Maven artifacts for Scala 2.10 and 2.11, sign them, and stage them on the
# Apache Nexus repository.
if [[ ! "$@" =~ --package-only ]]; then
  echo "Creating release commit and publishing to Apache repository"

  # Artifact publishing
  # NOTE(review): embedding the password in the clone URL exposes it to `ps`
  # and shell history; a git credential helper would be safer.
  git clone "https://$ASF_USERNAME:$ASF_PASSWORD@git-wip-us.apache.org/repos/asf/spark.git" \
    -b "$GIT_BRANCH"
  pushd spark

  export MAVEN_OPTS="-Xmx3g -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=1g"

  # Create release commits and push them to github
  # NOTE: This is done "eagerly", i.e. we do not first check that we can
  # successfully build before we coin the release commit. This helps avoid
  # races where other people add commits to this branch while we are in the
  # middle of building.
  old="<version>${RELEASE_VERSION}-SNAPSHOT<\/version>"
  new="<version>${RELEASE_VERSION}<\/version>"
  find . -name pom.xml -o -name package.scala | grep -v dev | xargs -I {} sed -i \
    -e "s/$old/$new/" {}
  git commit -a -m "Preparing Spark release $GIT_TAG"
  echo "Creating tag $GIT_TAG at the head of $GIT_BRANCH"
  git tag "$GIT_TAG"

  # Immediately bump the branch to the next development (SNAPSHOT) version.
  old="<version>${RELEASE_VERSION}<\/version>"
  new="<version>${NEXT_VERSION}-SNAPSHOT<\/version>"
  find . -name pom.xml -o -name package.scala | grep -v dev | xargs -I {} sed -i \
    -e "s/$old/$new/" {}
  git commit -a -m "Preparing development version ${NEXT_VERSION}-SNAPSHOT"
  git push origin "$GIT_TAG"
  git push origin "HEAD:$GIT_BRANCH"
  git checkout -f "$GIT_TAG"

  # Using Nexus API documented here:
  # https://support.sonatype.com/entries/39720203-Uploading-to-a-Staging-Repository-via-REST-API
  echo "Creating Nexus staging repository"
  repo_request="<promoteRequest><data><description>Apache Spark $GIT_TAG</description></data></promoteRequest>"
  out=$(curl -X POST -d "$repo_request" -u "$ASF_USERNAME:$ASF_PASSWORD" \
    -H "Content-Type:application/xml" -v \
    "$NEXUS_ROOT/profiles/$NEXUS_PROFILE/start")
  # Staged repository ids look like "orgapachespark-NNNN"; pull ours out of the response.
  staged_repo_id=$(echo $out | sed -e "s/.*\(orgapachespark-[0-9]\{4\}\).*/\1/")
  echo "Created Nexus staging repository: $staged_repo_id"

  # Start from a clean local repo so only artifacts from this build get uploaded.
  rm -rf "$SPARK_REPO"

  mvn -DskipTests -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
    -Pyarn -Phive -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
    clean install

  # Build once more against Scala 2.11, then restore the 2.10 poms.
  ./dev/change-version-to-2.11.sh

  mvn -DskipTests -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
    -Dscala-2.11 -Pyarn -Phive -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
    clean install

  ./dev/change-version-to-2.10.sh

  pushd "$SPARK_REPO"

  # Remove any extra files generated during install (keep only jars and poms).
  find . -type f | grep -v '\.jar' | grep -v '\.pom' | xargs rm

  echo "Creating hash and signature files"
  for file in $(find . -type f)
  do
    echo "$GPG_PASSPHRASE" | gpg --passphrase-fd 0 --output "$file.asc" --detach-sig --armor "$file";
    gpg --print-md MD5 "$file" > "$file.md5";
    gpg --print-md SHA1 "$file" > "$file.sha1"
  done

  echo "Uploading files to $NEXUS_UPLOAD"
  for file in $(find . -type f)
  do
    # strip leading ./
    file_short=$(echo "$file" | sed -e "s/\.\///")
    dest_url="$NEXUS_UPLOAD/org/apache/spark/$file_short"
    echo "  Uploading $file_short"
    curl -u "$ASF_USERNAME:$ASF_PASSWORD" --upload-file "$file_short" "$dest_url"
  done

  echo "Closing nexus staging repository"
  repo_request="<promoteRequest><data><stagedRepositoryId>$staged_repo_id</stagedRepositoryId><description>Apache Spark $GIT_TAG</description></data></promoteRequest>"
  out=$(curl -X POST -d "$repo_request" -u "$ASF_USERNAME:$ASF_PASSWORD" \
    -H "Content-Type:application/xml" -v \
    "$NEXUS_ROOT/profiles/$NEXUS_PROFILE/finish")
  echo "Closed Nexus staging repository: $staged_repo_id"

  popd
  popd
  rm -rf spark
fi
77
142
@@ -102,6 +167,12 @@ make_binary_release() {
102
167
cp -r spark spark-$RELEASE_VERSION -bin-$NAME
103
168
104
169
cd spark-$RELEASE_VERSION -bin-$NAME
170
+
171
+ # TODO There should probably be a flag to make-distribution to allow 2.11 support
172
+ if [[ $FLAGS == * scala-2.11* ]]; then
173
+ ./dev/change-version-to-2.11.sh
174
+ fi
175
+
105
176
./make-distribution.sh --name $NAME --tgz $FLAGS 2>&1 | tee ../binary-release-$NAME .log
106
177
cd ..
107
178
cp spark-$RELEASE_VERSION -bin-$NAME /spark-$RELEASE_VERSION -bin-$NAME .tgz .
@@ -118,22 +189,23 @@ make_binary_release() {
118
189
spark-$RELEASE_VERSION -bin-$NAME .tgz.sha
119
190
}
# Build every binary distribution in parallel, then wait for the whole batch.
# NOTE(review): under `set -e` a failing background job does NOT abort the
# script, and `wait` here discards the jobs' exit statuses — check the
# per-package binary-release-*.log files if a tarball is missing.
make_binary_release "hadoop1" "-Phive -Phive-thriftserver -Dhadoop.version=1.0.4" &
make_binary_release "hadoop1-scala2.11" "-Phive -Dscala-2.11" &
make_binary_release "cdh4" "-Phive -Phive-thriftserver -Dhadoop.version=2.0.0-mr1-cdh4.2.0" &
make_binary_release "hadoop2.3" "-Phadoop-2.3 -Phive -Phive-thriftserver -Pyarn" &
make_binary_release "hadoop2.4" "-Phadoop-2.4 -Phive -Phive-thriftserver -Pyarn" &
make_binary_release "mapr3" "-Pmapr3 -Phive -Phive-thriftserver" &
make_binary_release "mapr4" "-Pmapr4 -Pyarn -Phive -Phive-thriftserver" &
wait

# Copy data
echo "Copying release tarballs"
rc_folder=spark-$RELEASE_VERSION-$RC_NAME
ssh "$ASF_USERNAME@people.apache.org" \
  mkdir "/home/$ASF_USERNAME/public_html/$rc_folder"
scp spark-* \
  "$ASF_USERNAME@people.apache.org:/home/$ASF_USERNAME/public_html/$rc_folder/"

# Docs
cd spark
# Build the documentation site with Java 7 and publish it next to the binaries.
JAVA_HOME=$JAVA_7_HOME PRODUCTION=1 jekyll build
echo "Copying release documentation"
rc_docs_folder=${rc_folder}-docs
ssh "$ASF_USERNAME@people.apache.org" \
  mkdir "/home/$ASF_USERNAME/public_html/$rc_docs_folder"
rsync -r _site/* "$ASF_USERNAME@people.apache.org:/home/$ASF_USERNAME/public_html/$rc_docs_folder"

# Final summary for the release manager.
# printf is used instead of `echo "...\t..."`: plain echo (without -e) would
# print the backslash-t literally rather than a tab.
echo "Release $RELEASE_VERSION completed:"
printf 'Git tag:\t%s\n' "$GIT_TAG"
printf 'Release commit:\t%s\n' "$release_hash"
printf 'Binary location:\thttp://people.apache.org/~%s/%s\n' "$ASF_USERNAME" "$rc_folder"
printf 'Doc location:\thttp://people.apache.org/~%s/%s\n' "$ASF_USERNAME" "$rc_docs_folder"
0 commit comments