@@ -32,10 +32,15 @@ GIT_USERNAME=${GIT_USERNAME:-pwendell}
 GIT_PASSWORD=${GIT_PASSWORD:-XXX}
 GPG_PASSPHRASE=${GPG_PASSPHRASE:-XXX}
 GIT_BRANCH=${GIT_BRANCH:-branch-1.0}
-RELEASE_VERSION=${RELEASE_VERSION:-1.0.0}
+RELEASE_VERSION=${RELEASE_VERSION:-1.2.0}
+NEXT_VERSION=${NEXT_VERSION:-1.2.1}
 RC_NAME=${RC_NAME:-rc2}
 USER_NAME=${USER_NAME:-pwendell}
 
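+# Local Maven repository where the build installs the release artifacts, and the Apache Nexus staging URL they are uploaded to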
+M2_REPO=~/.m2/repository
+SPARK_REPO=$M2_REPO/org/apache/spark
+NEXUS_REPOSITORY=https://repository.apache.org/service/local/staging/deploy/maven2
+
 if [ -z "$JAVA_HOME" ]; then
   echo "Error: JAVA_HOME is not set, cannot proceed."
   exit -1
@@ -47,31 +52,59 @@ set -e
 GIT_TAG=v$RELEASE_VERSION-$RC_NAME
 
 if [[ ! "$@" =~ --package-only ]]; then
-  echo "Creating and publishing release"
+  echo "Creating release commit and publishing to Apache repository"
   # Artifact publishing
-  git clone https://git-wip-us.apache.org/repos/asf/spark.git -b $GIT_BRANCH
-  cd spark
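+  # Credentials are embedded in the clone URL so the later pushes to origin do not prompt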
+  git clone https://$GIT_USERNAME:$GIT_PASSWORD@git-wip-us.apache.org/repos/asf/spark.git \
+    -b $GIT_BRANCH
+  pushd spark
   export MAVEN_OPTS="-Xmx3g -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=1g"
 
-  mvn -Pyarn release:clean
+  echo "Creating tag $GIT_TAG at the head of $BRANCH_NAME"
+  git checkout -f $BRANCH_NAME
+  # TODO: We should update other things in the repo here
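+  # Drop the -SNAPSHOT suffix from the version in every pom.xml (except those under dev/)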
+  find . -name pom.xml | grep -v dev | xargs -I {} sed -i \
+    -e "s/${RELEASE_VERSION}-SNAPSHOT/$RELEASE_VERSION/" {}
+  git commit -a -m "Preparing Spark release $GIT_TAG"
+  git tag $GIT_TAG
 
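+  # Clear out previously installed Spark artifacts so the local repo contains only this build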
+  rm -rf $SPARK_REPO
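+  # Build the release and install the artifacts into the local Maven repository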
   mvn -DskipTests \
-    -Darguments="-DskipTests=true -Dmaven.javadoc.skip=true -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 -Dgpg.passphrase=${GPG_PASSPHRASE}" \
-    -Dusername=$GIT_USERNAME -Dpassword=$GIT_PASSWORD \
-    -Dmaven.javadoc.skip=true \
     -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
     -Dtag=$GIT_TAG -DautoVersionSubmodules=true \
     -Pyarn -Phive -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
-    --batch-mode release:prepare
-
-  mvn -DskipTests \
-    -Darguments="-DskipTests=true -Dmaven.javadoc.skip=true -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 -Dgpg.passphrase=${GPG_PASSPHRASE}" \
-    -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
-    -Dmaven.javadoc.skip=true \
-    -Pyarn -Phive -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
-    release:perform
-
-  cd ..
+    install
+
+  pushd $SPARK_REPO
+
+  # Remove any extra files generated during install
+  find . -type f | grep -v \.jar | grep -v \.pom | xargs rm
+
+  echo "Creating hash and signature files"
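+  # Generate a detached ASCII-armored GPG signature (.asc) plus MD5 and SHA1 checksum files for every artifact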
+  for file in $(find . -type f)
+  do
+    echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --output $file.asc --detach-sig --armour $file;
+    gpg --print-md MD5 $file > $file.md5;
+    gpg --print-md SHA1 $file > $file.sha1
+  done
+
+  echo "Uploading files to $NEXUS_REPOSITORY"
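+  # Push each staged file to the Nexus staging repository with an HTTP PUT (curl --upload-file)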
+  for file in $(find . -type f)
+  do
+    # strip leading ./
+    file_short=$(echo $file | sed -e "s/\.\///")
+    dest_url="$NEXUS_REPOSITORY/org/apache/spark/$file_short"
+    echo "Uploading $file_short"
+    curl -u $NEXUS_USERNAME:$NEXUS_PASSWORD --upload-file $file_short $dest_url
+  done
+  popd
+
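+  # Bump every pom.xml (except those under dev/) to the next development version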
+  find . -name pom.xml | grep -v dev | xargs -I {} sed -i \
+    -e "s/$RELEASE_VERSION/${NEXT_VERSION}-SNAPSHOT/" {}
+  git commit -a -m "Preparing development version ${NEXT_VERSION}-SNAPSHOT"
+
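+  # Push the release tag and the version-bump commits back to the ASF repository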
+  git push origin $GIT_TAG
+  git push origin HEAD:$BRANCH_NAME
+  popd
   rm -rf spark
 fi