
Commit 69c3f44

nchammas authored and JoshRosen committed
[SPARK-3479] [Build] Report failed test category
This PR allows SparkQA (i.e. Jenkins) to report in its posts to GitHub what category of test failed, if one can be determined. The failure categories are:

* general failure
* RAT checks failed
* Scala style checks failed
* Python style checks failed
* Build failed
* Spark unit tests failed
* PySpark unit tests failed
* MiMa checks failed

This PR also fixes the diffing logic used to determine if a patch introduces new classes.

Author: Nicholas Chammas <[email protected]>

Closes apache#2606 from nchammas/report-failed-test-category and squashes the following commits:

d67df03 [Nicholas Chammas] report what test category failed
1 parent 2300eb5 commit 69c3f44
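
In outline, the mechanism works like this (a rough sketch of the idea, not the scripts themselves): dev/run-tests exits with a per-category code, and dev/run-tests-jenkins translates that exit status into the message it posts to GitHub.

#!/usr/bin/env bash
# Sketch only: the failure category is carried in the exit status of
# dev/run-tests and mapped back to a label by the Jenkins wrapper.
./dev/run-tests
test_result="$?"
if [ "$test_result" -eq 0 ]; then
  echo " * This patch **passes all tests**."
elif [ "$test_result" -eq 12 ]; then   # 12 is BLOCK_SCALA_STYLE in dev/run-tests-codes.sh
  echo " * This patch **fails Scala style tests**."
else
  echo " * This patch **fails some tests**."
fi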

File tree

3 files changed: 126 additions & 35 deletions


dev/run-tests

Lines changed: 30 additions & 2 deletions
@@ -24,6 +24,16 @@ cd "$FWDIR"
 # Remove work directory
 rm -rf ./work
 
+source "$FWDIR/dev/run-tests-codes.sh"
+
+CURRENT_BLOCK=$BLOCK_GENERAL
+
+function handle_error () {
+  echo "[error] Got a return code of $? on line $1 of the run-tests script."
+  exit $CURRENT_BLOCK
+}
+
+
 # Build against the right verison of Hadoop.
 {
   if [ -n "$AMPLAB_JENKINS_BUILD_PROFILE" ]; then
@@ -91,33 +101,43 @@ if [ -n "$AMPLAB_JENKINS" ]; then
   fi
 fi
 
-# Fail fast
-set -e
 set -o pipefail
+trap 'handle_error $LINENO' ERR
 
 echo ""
 echo "========================================================================="
 echo "Running Apache RAT checks"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_RAT
+
 ./dev/check-license
 
 echo ""
 echo "========================================================================="
 echo "Running Scala style checks"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_SCALA_STYLE
+
 ./dev/lint-scala
 
 echo ""
 echo "========================================================================="
 echo "Running Python style checks"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_PYTHON_STYLE
+
 ./dev/lint-python
 
 echo ""
 echo "========================================================================="
 echo "Building Spark"
 echo "========================================================================="
 
+CURRENT_BLOCK=$BLOCK_BUILD
+
 {
   # We always build with Hive because the PySpark Spark SQL tests need it.
   BUILD_MVN_PROFILE_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive"
@@ -141,6 +161,8 @@ echo "========================================================================="
 echo "Running Spark unit tests"
 echo "========================================================================="
 
+CURRENT_BLOCK=$BLOCK_SPARK_UNIT_TESTS
+
 {
   # If the Spark SQL tests are enabled, run the tests with the Hive profiles enabled.
   # This must be a single argument, as it is.
@@ -175,10 +197,16 @@ echo ""
 echo "========================================================================="
 echo "Running PySpark tests"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_PYSPARK_UNIT_TESTS
+
 ./python/run-tests
 
 echo ""
 echo "========================================================================="
 echo "Detecting binary incompatibilites with MiMa"
 echo "========================================================================="
+
+CURRENT_BLOCK=$BLOCK_MIMA
+
 ./dev/mima
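
Note that the "Fail fast" set -e is replaced by the ERR trap above: the script still stops at the first failure, but it now exits with $CURRENT_BLOCK rather than with whatever status the failing command returned. A standalone illustration of that behavior (not part of the script):

#!/usr/bin/env bash
# Illustration only: the ERR trap converts any command failure into the
# currently active block code.
CURRENT_BLOCK=14               # pretend the build phase is running (BLOCK_BUILD)
trap 'exit $CURRENT_BLOCK' ERR
ls /nonexistent/path           # fails with its own nonzero status...
echo "never reached"           # ...but the script as a whole exits with 14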

dev/run-tests-codes.sh

Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+readonly BLOCK_GENERAL=10
+readonly BLOCK_RAT=11
+readonly BLOCK_SCALA_STYLE=12
+readonly BLOCK_PYTHON_STYLE=13
+readonly BLOCK_BUILD=14
+readonly BLOCK_SPARK_UNIT_TESTS=15
+readonly BLOCK_PYSPARK_UNIT_TESTS=16
+readonly BLOCK_MIMA=17
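
Both dev/run-tests and dev/run-tests-jenkins source this file, so each side can refer to the codes by name instead of by bare number; a minimal usage sketch (run from the repository root):

#!/usr/bin/env bash
# Sketch only: compare the exit status of dev/run-tests against the named codes.
source "./dev/run-tests-codes.sh"

./dev/run-tests
test_result="$?"
if [ "$test_result" -eq "$BLOCK_MIMA" ]; then
  echo "The MiMa checks were the failing category (exit code $BLOCK_MIMA)."
fi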

dev/run-tests-jenkins

Lines changed: 69 additions & 33 deletions
@@ -26,9 +26,23 @@
 FWDIR="$(cd `dirname $0`/..; pwd)"
 cd "$FWDIR"
 
+source "$FWDIR/dev/run-tests-codes.sh"
+
 COMMENTS_URL="https://api.github.com/repos/apache/spark/issues/$ghprbPullId/comments"
 PULL_REQUEST_URL="https://github.com/apache/spark/pull/$ghprbPullId"
 
+# Important Environment Variables
+# ---
+# $ghprbActualCommit
+#+  This is the hash of the most recent commit in the PR.
+#+  The merge-base of this and master is the commit from which the PR was branched.
+# $sha1
+#+  If the patch merges cleanly, this is a reference to the merge commit hash
+#+  (e.g. "origin/pr/2606/merge").
+#+  If the patch does not merge cleanly, it is equal to $ghprbActualCommit.
+#+  The merge-base of this and master in the case of a clean merge is the most recent commit
+#+  against master.
+
 COMMIT_URL="https://github.com/apache/spark/commit/${ghprbActualCommit}"
 # GitHub doesn't auto-link short hashes when submitted via the API, unfortunately. :(
 SHORT_COMMIT_HASH="${ghprbActualCommit:0:7}"
@@ -84,42 +98,46 @@ function post_message () {
   fi
 }
 
+
+# We diff master...$ghprbActualCommit because that gets us changes introduced in the PR
+#+ and not anything else added to master since the PR was branched.
+
 # check PR merge-ability and check for new public classes
 {
   if [ "$sha1" == "$ghprbActualCommit" ]; then
-    merge_note=" * This patch **does not** merge cleanly!"
+    merge_note=" * This patch **does not merge cleanly**."
   else
     merge_note=" * This patch merges cleanly."
+  fi
+
+  source_files=$(
+    git diff master...$ghprbActualCommit --name-only `# diff patch against master from branch point` \
+    | grep -v -e "\/test" `# ignore files in test directories` \
+    | grep -e "\.py$" -e "\.java$" -e "\.scala$" `# include only code files` \
+    | tr "\n" " "
+  )
+  new_public_classes=$(
+    git diff master...$ghprbActualCommit ${source_files} `# diff patch against master from branch point` \
+    | grep "^\+" `# filter in only added lines` \
+    | sed -r -e "s/^\+//g" `# remove the leading +` \
+    | grep -e "trait " -e "class " `# filter in lines with these key words` \
+    | grep -e "{" -e "(" `# filter in lines with these key words, too` \
+    | grep -v -e "\@\@" -e "private" `# exclude lines with these words` \
+    | grep -v -e "^// " -e "^/\*" -e "^ \* " `# exclude comment lines` \
+    | sed -r -e "s/\{.*//g" `# remove from the { onwards` \
+    | sed -r -e "s/\}//g" `# just in case, remove }; they mess the JSON` \
+    | sed -r -e "s/\"/\\\\\"/g" `# escape double quotes; they mess the JSON` \
+    | sed -r -e "s/^(.*)$/\`\1\`/g" `# surround with backticks for style` \
+    | sed -r -e "s/^/ \* /g" `# prepend ' *' to start of line` \
+    | sed -r -e "s/$/\\\n/g" `# append newline to end of line` \
+    | tr -d "\n" `# remove actual LF characters`
+  )
 
-  source_files=$(
-    git diff master... --name-only `# diff patch against master from branch point` \
-    | grep -v -e "\/test" `# ignore files in test directories` \
-    | grep -e "\.py$" -e "\.java$" -e "\.scala$" `# include only code files` \
-    | tr "\n" " "
-  )
-  new_public_classes=$(
-    git diff master... ${source_files} `# diff patch against master from branch point` \
-    | grep "^\+" `# filter in only added lines` \
-    | sed -r -e "s/^\+//g" `# remove the leading +` \
-    | grep -e "trait " -e "class " `# filter in lines with these key words` \
-    | grep -e "{" -e "(" `# filter in lines with these key words, too` \
-    | grep -v -e "\@\@" -e "private" `# exclude lines with these words` \
-    | grep -v -e "^// " -e "^/\*" -e "^ \* " `# exclude comment lines` \
-    | sed -r -e "s/\{.*//g" `# remove from the { onwards` \
-    | sed -r -e "s/\}//g" `# just in case, remove }; they mess the JSON` \
-    | sed -r -e "s/\"/\\\\\"/g" `# escape double quotes; they mess the JSON` \
-    | sed -r -e "s/^(.*)$/\`\1\`/g" `# surround with backticks for style` \
-    | sed -r -e "s/^/ \* /g" `# prepend ' *' to start of line` \
-    | sed -r -e "s/$/\\\n/g" `# append newline to end of line` \
-    | tr -d "\n" `# remove actual LF characters`
-  )
-
-  if [ "$new_public_classes" == "" ]; then
-    public_classes_note=" * This patch adds no public classes."
-  else
-    public_classes_note=" * This patch adds the following public classes _(experimental)_:"
-    public_classes_note="${public_classes_note}\n${new_public_classes}"
-  fi
+  if [ -z "$new_public_classes" ]; then
+    public_classes_note=" * This patch adds no public classes."
+  else
+    public_classes_note=" * This patch adds the following public classes _(experimental)_:"
+    public_classes_note="${public_classes_note}\n${new_public_classes}"
   fi
 }
 
@@ -147,12 +165,30 @@ function post_message () {
 
     post_message "$fail_message"
     exit $test_result
+  elif [ "$test_result" -eq "0" ]; then
+    test_result_note=" * This patch **passes all tests**."
  else
-    if [ "$test_result" -eq "0" ]; then
-      test_result_note=" * This patch **passes** unit tests."
+    if [ "$test_result" -eq "$BLOCK_GENERAL" ]; then
+      failing_test="some tests"
+    elif [ "$test_result" -eq "$BLOCK_RAT" ]; then
+      failing_test="RAT tests"
+    elif [ "$test_result" -eq "$BLOCK_SCALA_STYLE" ]; then
+      failing_test="Scala style tests"
+    elif [ "$test_result" -eq "$BLOCK_PYTHON_STYLE" ]; then
+      failing_test="Python style tests"
+    elif [ "$test_result" -eq "$BLOCK_BUILD" ]; then
+      failing_test="to build"
+    elif [ "$test_result" -eq "$BLOCK_SPARK_UNIT_TESTS" ]; then
+      failing_test="Spark unit tests"
+    elif [ "$test_result" -eq "$BLOCK_PYSPARK_UNIT_TESTS" ]; then
+      failing_test="PySpark unit tests"
+    elif [ "$test_result" -eq "$BLOCK_MIMA" ]; then
+      failing_test="MiMa tests"
    else
-      test_result_note=" * This patch **fails** unit tests."
+      failing_test="some tests"
    fi
+
+    test_result_note=" * This patch **fails $failing_test**."
  fi
 }
 
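
One note on the diffing fix above: the three-dot form git diff master...$ghprbActualCommit compares the PR's head commit against its merge-base with master (the point where the PR branched off), so only changes introduced by the PR are scanned for new public classes. A quick illustration with a hypothetical commit hash:

# abc1234 stands in for the PR's head commit ($ghprbActualCommit).
git merge-base master abc1234              # the commit where the PR branched off master
git diff master...abc1234 --name-only      # equivalent to:
git diff $(git merge-base master abc1234) abc1234 --name-only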
