FWDIR="$(cd "`dirname $0`"/..; pwd)"
cd "$FWDIR"

-if [ -n "$AMPLAB_JENKINS_BUILD_PROFILE" ]; then
-  if [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop1.0" ]; then
-    export SBT_MAVEN_PROFILES_ARGS="-Dhadoop.version=1.0.4"
-  elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.0" ]; then
-    export SBT_MAVEN_PROFILES_ARGS="-Dhadoop.version=2.0.0-mr1-cdh4.1.1"
-  elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.2" ]; then
-    export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Dhadoop.version=2.2.0"
-  elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.3" ]; then
-    export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0"
+# Remove work directory
+rm -rf ./work
+
+# Build against the right version of Hadoop.
+{
+  if [ -n "$AMPLAB_JENKINS_BUILD_PROFILE" ]; then
+    if [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop1.0" ]; then
+      export SBT_MAVEN_PROFILES_ARGS="-Dhadoop.version=1.0.4"
+    elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.0" ]; then
+      export SBT_MAVEN_PROFILES_ARGS="-Dhadoop.version=2.0.0-mr1-cdh4.1.1"
+    elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.2" ]; then
+      export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Dhadoop.version=2.2.0"
+    elif [ "$AMPLAB_JENKINS_BUILD_PROFILE" = "hadoop2.3" ]; then
+      export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0"
+    fi
   fi
-fi

-if [ -z "$SBT_MAVEN_PROFILES_ARGS" ]; then
-  export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0"
-fi
+  if [ -z "$SBT_MAVEN_PROFILES_ARGS" ]; then
+    export SBT_MAVEN_PROFILES_ARGS="-Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0"
+  fi
+}
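As a usage note: the block above keys entirely off AMPLAB_JENKINS_BUILD_PROFILE. A minimal sketch of driving it the way a Jenkins job would (assuming this script is dev/run-tests, invoked from the repository root):

```bash
# Illustrative only: select the Hadoop 2.3 profile the way the Jenkins job does.
# AMPLAB_JENKINS_BUILD_PROFILE and SBT_MAVEN_PROFILES_ARGS are the variables the
# script above reads/sets; the dev/run-tests path is assumed from context.
export AMPLAB_JENKINS_BUILD_PROFILE=hadoop2.3
./dev/run-tests
# Leaving the variable unset falls through to the default
# "-Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0" profile arguments.
```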

export SBT_MAVEN_PROFILES_ARGS="$SBT_MAVEN_PROFILES_ARGS -Pkinesis-asl"

-echo "SBT_MAVEN_PROFILES_ARGS=\"$SBT_MAVEN_PROFILES_ARGS\""
-
-# Remove work directory
-rm -rf ./work
-
-if test -x "$JAVA_HOME/bin/java"; then
-  declare java_cmd="$JAVA_HOME/bin/java"
-else
-  declare java_cmd=java
-fi
-JAVA_VERSION=$($java_cmd -version 2>&1 | sed 's/java version "\(.*\)\.\(.*\)\..*"/\1\2/; 1q')
-[ "$JAVA_VERSION" -ge 18 ] && echo "" || echo "[Warn] Java 8 tests will not run because JDK version is < 1.8."
+# Determine Java path and version.
+{
+  if test -x "$JAVA_HOME/bin/java"; then
+    declare java_cmd="$JAVA_HOME/bin/java"
+  else
+    declare java_cmd=java
+  fi
+
+  # We can't use sed -r -e due to OS X / BSD compatibility; hence, all the parentheses.
+  JAVA_VERSION=$(
+    $java_cmd -version 2>&1 \
+    | grep -e "^java version" --max-count=1 \
+    | sed "s/java version \"\(.*\)\.\(.*\)\.\(.*\)\"/\1\2/"
+  )
+
+  if [ "$JAVA_VERSION" -lt 18 ]; then
+    echo "[warn] Java 8 tests will not run because JDK version is < 1.8."
+  fi
+}
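To see what that grep | sed pipeline produces, here is a minimal sketch using a typical JDK 8 version banner (the sample banner is an assumption; real `java -version` output varies by vendor and build):

```bash
# Illustrative only: what the version-parsing pipeline above yields for a JDK 8 banner.
echo 'java version "1.8.0_45"' \
  | grep -e "^java version" --max-count=1 \
  | sed "s/java version \"\(.*\)\.\(.*\)\.\(.*\)\"/\1\2/"
# Prints "18", so the "-lt 18" check above does not emit the warning.
```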

-# Partial solution for SPARK-1455. Only run Hive tests if there are sql changes.
+# Only run Hive tests if there are sql changes.
+# Partial solution for SPARK-1455.
if [ -n "$AMPLAB_JENKINS" ]; then
  git fetch origin master:master
-  diffs=`git diff --name-only master | grep "^\(sql/\)\|\(bin/spark-sql\)\|\(sbin/start-thriftserver.sh\)"`
-  if [ -n "$diffs" ]; then
-    echo "Detected changes in SQL. Will run Hive test suite."
+
+  sql_diffs=$(
+    git diff --name-only selective-testing \
+    | grep -e "^sql/" -e "^bin/spark-sql" -e "^sbin/start-thriftserver.sh"
+  )
+
+  non_sql_diffs=$(
+    git diff --name-only selective-testing \
+    | grep -v -e "^sql/" -e "^bin/spark-sql" -e "^sbin/start-thriftserver.sh"
+  )
+
+  if [ -n "$sql_diffs" ]; then
+    echo "[info] Detected changes in SQL. Will run Hive test suite."
    _RUN_SQL_TESTS=true
+
+    if [ -z "$non_sql_diffs" ]; then
+      echo "[info] Detected no changes except in SQL. Will only run SQL tests."
+      _SQL_TESTS_ONLY=true
+    fi
  fi
fi

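A quick sketch of how the two grep filters above partition a change set; the file names below are invented for illustration:

```bash
# Illustrative only: classifying a hypothetical list of changed files.
changed='sql/core/pom.xml
bin/spark-sql
core/src/main/scala/Foo.scala'
echo "$changed" | grep -e "^sql/" -e "^bin/spark-sql" -e "^sbin/start-thriftserver.sh"
# -> sql/core/pom.xml and bin/spark-sql (non-empty, so _RUN_SQL_TESTS gets set)
echo "$changed" | grep -v -e "^sql/" -e "^bin/spark-sql" -e "^sbin/start-thriftserver.sh"
# -> core/src/main/scala/Foo.scala (non-empty, so _SQL_TESTS_ONLY stays unset)
```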
@@ -70,42 +99,69 @@ echo ""
echo "========================================================================="
echo "Running Apache RAT checks"
echo "========================================================================="
-dev/check-license
+./dev/check-license

echo ""
echo "========================================================================="
echo "Running Scala style checks"
echo "========================================================================="
-dev/lint-scala
+./dev/lint-scala

echo ""
echo "========================================================================="
echo "Running Python style checks"
echo "========================================================================="
-dev/lint-python
+./dev/lint-python
+
+echo ""
+echo "========================================================================="
+echo "Building Spark"
+echo "========================================================================="
+
+{
+  # We always build with Hive because the PySpark Spark SQL tests need it.
+  BUILD_MVN_PROFILE_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive"
+
+  echo "[info] Building Spark with these arguments: $BUILD_MVN_PROFILE_ARGS"
+
+  # NOTE: echo "q" is needed because sbt on encountering a build file with failure
+  #+ (either resolution or compilation) prompts the user for input either q, r, etc
+  #+ to quit or retry. This echo is there to make it not block.
+  # QUESTION: Why doesn't 'yes "q"' work?
+  # QUESTION: Why doesn't 'grep -v -e "^\[info\] Resolving"' work?
+  echo -e "q\n" \
+    | sbt/sbt $BUILD_MVN_PROFILE_ARGS clean package assembly/assembly \
+    | grep -v -e "info.*Resolving" -e "warn.*Merging" -e "info.*Including"
+}
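As a rough illustration of what that trailing grep -v filter keeps and drops (the sample sbt log lines below are invented):

```bash
# Illustrative only: the -v filter drops the high-volume resolver/assembly noise
# and lets everything else (warnings, errors, test results) through.
printf '%s\n' \
  '[info] Resolving org.scala-lang#scala-library;2.10.4 ...' \
  "[warn] Merging 'META-INF/NOTICE' with strategy 'rename'" \
  '[error] (core/compile:compile) Compilation failed' \
  | grep -v -e "info.*Resolving" -e "warn.*Merging" -e "info.*Including"
# -> [error] (core/compile:compile) Compilation failed
```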

echo ""
echo "========================================================================="
echo "Running Spark unit tests"
echo "========================================================================="

-# Build Spark; we always build with Hive because the PySpark Spark SQL tests need it.
-# echo "q" is needed because sbt on encountering a build file with failure
-# (either resolution or compilation) prompts the user for input either q, r,
-# etc to quit or retry. This echo is there to make it not block.
-BUILD_MVN_PROFILE_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive"
-echo -e "q\n" | sbt/sbt $BUILD_MVN_PROFILE_ARGS clean package assembly/assembly | \
-  grep -v -e "info.*Resolving" -e "warn.*Merging" -e "info.*Including"
-
-# If the Spark SQL tests are enabled, run the tests with the Hive profiles enabled:
-if [ -n "$_RUN_SQL_TESTS" ]; then
-  SBT_MAVEN_PROFILES_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive"
-fi
-# echo "q" is needed because sbt on encountering a build file with failure
-# (either resolution or compilation) prompts the user for input either q, r,
-# etc to quit or retry. This echo is there to make it not block.
-echo -e "q\n" | sbt/sbt $SBT_MAVEN_PROFILES_ARGS test | \
-  grep -v -e "info.*Resolving" -e "warn.*Merging" -e "info.*Including"
+{
+  # If the Spark SQL tests are enabled, run the tests with the Hive profiles enabled.
+  if [ -n "$_RUN_SQL_TESTS" ]; then
+    SBT_MAVEN_PROFILES_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive"
+  fi
+
+  if [ -n "$_SQL_TESTS_ONLY" ]; then
+    SBT_MAVEN_TEST_ARGS="catalyst/test sql/test hive/test"
+  else
+    SBT_MAVEN_TEST_ARGS="test"
+  fi
+
+  echo "[info] Running Spark tests with these arguments: $SBT_MAVEN_PROFILES_ARGS $SBT_MAVEN_TEST_ARGS"
+
+  # NOTE: echo "q" is needed because sbt on encountering a build file with failure
+  #+ (either resolution or compilation) prompts the user for input either q, r, etc
+  #+ to quit or retry. This echo is there to make it not block.
+  # QUESTION: Why doesn't 'yes "q"' work?
+  # QUESTION: Why doesn't 'grep -v -e "^\[info\] Resolving"' work?
+  echo -e "q\n" \
+    | sbt/sbt "$SBT_MAVEN_PROFILES_ARGS" "$SBT_MAVEN_TEST_ARGS" \
+    | grep -v -e "info.*Resolving" -e "warn.*Merging" -e "info.*Including"
+}
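For a SQL-only change set under the default profile, the variables above combine into an invocation along these lines (a sketch only; note that each double-quoted variable expands to a single shell word):

```bash
# Illustrative only: the effective command when _RUN_SQL_TESTS and _SQL_TESTS_ONLY
# are both set and no Jenkins profile is configured. Because the expansions are
# double-quoted in the script, sbt receives exactly two arguments here.
sbt/sbt \
  "-Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0 -Pkinesis-asl -Phive" \
  "catalyst/test sql/test hive/test"
```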

echo ""
echo "========================================================================="
@@ -117,4 +173,4 @@ echo ""
echo "========================================================================="
echo "Detecting binary incompatibilities with MiMa"
echo "========================================================================="
-dev/mima
+./dev/mima