Skip to content

Commit 0470587

Browse files
committed
- Erased unnecessary instance of -Phadoop-2.2 -Dhadoop.version=2.2.0 in create-release.sh
- Updated how the releases are made in create-release.sh now that the default Hadoop version is 2.2.0 - Erased unnecessary instance of -Phadoop-2.2 -Dhadoop.version=2.2.0 in scalastyle - Erased unnecessary instance of -Phadoop-2.2 -Dhadoop.version=2.2.0 in run-tests - Better example given in hadoop-third-party-distributions.md now that the default Hadoop version is 2.2.0
1 parent a650779 commit 0470587

File tree

4 files changed

+10
-10
lines changed

4 files changed

+10
-10
lines changed

dev/create-release/create-release.sh

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -118,14 +118,14 @@ if [[ ! "$@" =~ --skip-publish ]]; then
118118

119119
rm -rf $SPARK_REPO
120120

121-
build/mvn -DskipTests -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
122-
-Pyarn -Phive -Phive-thriftserver -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
121+
build/mvn -DskipTests -Dyarn.version=2.2.0 \
122+
-Pyarn -Phive -Phive-thriftserver -Pspark-ganglia-lgpl -Pkinesis-asl \
123123
clean install
124124

125125
./dev/change-version-to-2.11.sh
126126

127-
build/mvn -DskipTests -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
128-
-Dscala-2.11 -Pyarn -Phive -Phadoop-2.2 -Pspark-ganglia-lgpl -Pkinesis-asl \
127+
build/mvn -DskipTests -Dyarn.version=2.2.0 \
128+
-Dscala-2.11 -Pyarn -Phive -Pspark-ganglia-lgpl -Pkinesis-asl \
129129
clean install
130130

131131
./dev/change-version-to-2.10.sh
@@ -228,8 +228,8 @@ if [[ ! "$@" =~ --skip-package ]]; then
228228

229229
# We increment the Zinc port each time to avoid OOM's and other craziness if multiple builds
230230
# share the same Zinc server.
231-
make_binary_release "hadoop1" "-Phive -Phive-thriftserver -Dhadoop.version=1.0.4" "3030" &
232-
make_binary_release "hadoop1-scala2.11" "-Phive -Dscala-2.11" "3031" &
231+
make_binary_release "hadoop2" "-Phive -Phive-thriftserver -Dhadoop.version=2.2.0" "3030" &
232+
make_binary_release "hadoop2-scala2.11" "-Phive -Dscala-2.11" "3031" &
233233
make_binary_release "cdh4" "-Phive -Phive-thriftserver -Dhadoop.version=2.0.0-mr1-cdh4.2.0" "3032" &
234234
make_binary_release "hadoop2.3" "-Phadoop-2.3 -Phive -Phive-thriftserver -Pyarn" "3033" &
235235
make_binary_release "hadoop2.4" "-Phadoop-2.4 -Phive -Phive-thriftserver -Pyarn" "3034" &

dev/run-tests

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ function handle_error () {
4444
elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.0" ]; then
4545
export SBT_MAVEN_PROFILES_ARGS="-Dhadoop.version=2.0.0-mr1-cdh4.1.1"
4646
elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.2" ]; then
47-
export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0"
47+
export SBT_MAVEN_PROFILES_ARGS="-Pyarn"
4848
elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.3" ]; then
4949
export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0"
5050
fi

dev/scalastyle

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,8 @@
2020
echo -e "q\n" | build/sbt -Phive -Phive-thriftserver scalastyle > scalastyle.txt
2121
echo -e "q\n" | build/sbt -Phive -Phive-thriftserver test:scalastyle >> scalastyle.txt
2222
# Check style with YARN built too
23-
echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 scalastyle >> scalastyle.txt
24-
echo -e "q\n" | build/sbt -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 test:scalastyle >> scalastyle.txt
23+
echo -e "q\n" | build/sbt -Pyarn scalastyle >> scalastyle.txt
24+
echo -e "q\n" | build/sbt -Pyarn test:scalastyle >> scalastyle.txt
2525

2626
ERRORS=$(cat scalastyle.txt | awk '{if($1~/error/)print}')
2727
rm scalastyle.txt

docs/hadoop-third-party-distributions.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ property. For certain versions, you will need to specify additional profiles. Fo
1414
see the guide on [building with maven](building-spark.html#specifying-the-hadoop-version):
1515

1616
mvn -Dhadoop.version=1.0.4 -DskipTests clean package
17-
mvn -Phadoop-2.2 -Dhadoop.version=2.2.0 -DskipTests clean package
17+
mvn -Phadoop-2.3 -Dhadoop.version=2.3.0 -DskipTests clean package
1818

1919
The table below lists the corresponding `hadoop.version` code for each CDH/HDP release. Note that
2020
some Hadoop releases are binary compatible across client versions. This means the pre-built Spark

0 commit comments

Comments (0)