
Commit 05b09a3

Author: Jacky Li (committed)
Commit message: fix conflict base on the latest master branch
Merge commit 05b09a3, 2 parents: 73c16b1 + 1955645

440 files changed: +9150 / -7866 lines (large commit; only a subset of the changed files is shown below)


assembly/pom.xml

Lines changed: 20 additions & 0 deletions
@@ -354,5 +354,25 @@
         </dependency>
       </dependencies>
     </profile>
+
+    <!-- Profiles that disable inclusion of certain dependencies. -->
+    <profile>
+      <id>hadoop-provided</id>
+      <properties>
+        <hadoop.deps.scope>provided</hadoop.deps.scope>
+      </properties>
+    </profile>
+    <profile>
+      <id>hive-provided</id>
+      <properties>
+        <hive.deps.scope>provided</hive.deps.scope>
+      </properties>
+    </profile>
+    <profile>
+      <id>parquet-provided</id>
+      <properties>
+        <parquet.deps.scope>provided</parquet.deps.scope>
+      </properties>
+    </profile>
   </profiles>
 </project>
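
For context: switching these dependency scopes to provided keeps the corresponding jars out of Spark's assembly, on the assumption that the deployment environment supplies them at runtime. A minimal build invocation sketch (the flags beyond the profile id are assumptions, not part of this diff):

    # Build without bundling Hadoop classes; the cluster's own Hadoop
    # jars must then be visible on the runtime classpath instead.
    mvn -Phadoop-provided -DskipTests clean package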

bagel/pom.xml

Lines changed: 0 additions & 15 deletions
@@ -40,15 +40,6 @@
       <artifactId>spark-core_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-server</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
       <artifactId>scalacheck_${scala.binary.version}</artifactId>
@@ -58,11 +49,5 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    <plugins>
-      <plugin>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest-maven-plugin</artifactId>
-      </plugin>
-    </plugins>
   </build>
 </project>
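
These deletions (mirrored in core/pom.xml further down) drop per-module scalatest dependencies and scalatest-maven-plugin blocks; the pattern is consistent with that test configuration being centralized in the parent pom, though the parent-pom side of the change is not visible in this excerpt.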

bagel/src/test/resources/log4j.properties

Lines changed: 2 additions & 2 deletions
@@ -15,10 +15,10 @@
 # limitations under the License.
 #

-# Set everything to be logged to the file bagel/target/unit-tests.log
+# Set everything to be logged to the file target/unit-tests.log
 log4j.rootCategory=INFO, file
 log4j.appender.file=org.apache.log4j.FileAppender
-log4j.appender.file.append=false
+log4j.appender.file.append=true
 log4j.appender.file.file=target/unit-tests.log
 log4j.appender.file.layout=org.apache.log4j.PatternLayout
 log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
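
The commit does not state a rationale, but switching append from false to true, together with the now-relative target/unit-tests.log path, reads as letting successive (or concurrent) test JVMs accumulate output into one per-module log instead of truncating each other's writes.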

bin/compute-classpath.cmd

Lines changed: 7 additions & 0 deletions
@@ -109,6 +109,13 @@ if "x%YARN_CONF_DIR%"=="x" goto no_yarn_conf_dir
 set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%
 :no_yarn_conf_dir

+rem To allow for distributions to append needed libraries to the classpath (e.g. when
+rem using the "hadoop-provided" profile to build Spark), check SPARK_DIST_CLASSPATH and
+rem append it to tbe final classpath.
+if not "x%$SPARK_DIST_CLASSPATH%"=="x" (
+  set CLASSPATH=%CLASSPATH%;%SPARK_DIST_CLASSPATH%
+)
+
 rem A bit of a hack to allow calling this script within run2.cmd without seeing output
 if "%DONT_PRINT_CLASSPATH%"=="1" goto exit
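
One detail worth flagging in the batch hunk above: the guard expands %$SPARK_DIST_CLASSPATH%, which in cmd refers to a variable literally named $SPARK_DIST_CLASSPATH (shell-style syntax apparently carried over by mistake). Since that name is almost never defined, the expansion is empty, the condition stays false, and SPARK_DIST_CLASSPATH would effectively be ignored on Windows. The "tbe" in the comment is a typo present in the commit itself.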

bin/compute-classpath.sh

Lines changed: 7 additions & 0 deletions
@@ -146,4 +146,11 @@ if [ -n "$YARN_CONF_DIR" ]; then
   CLASSPATH="$CLASSPATH:$YARN_CONF_DIR"
 fi

+# To allow for distributions to append needed libraries to the classpath (e.g. when
+# using the "hadoop-provided" profile to build Spark), check SPARK_DIST_CLASSPATH and
+# append it to tbe final classpath.
+if [ -n "$SPARK_DIST_CLASSPATH" ]; then
+  CLASSPATH="$CLASSPATH:$SPARK_DIST_CLASSPATH"
+fi
+
 echo "$CLASSPATH"
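
A usage sketch for the shell side: with a hadoop-provided build, the distribution (or the user) exports SPARK_DIST_CLASSPATH before launching Spark so the external Hadoop client jars become visible. hadoop classpath is the standard Hadoop CLI helper; the install path below is an assumption for illustration:

    # Point Spark at the cluster's own Hadoop jars (path assumed)
    export SPARK_DIST_CLASSPATH=$(/opt/hadoop/bin/hadoop classpath)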

bin/spark-class

Lines changed: 1 addition & 1 deletion
@@ -148,7 +148,7 @@ fi
 if [[ "$1" =~ org.apache.spark.tools.* ]]; then
   if test -z "$SPARK_TOOLS_JAR"; then
     echo "Failed to find Spark Tools Jar in $FWDIR/tools/target/scala-$SPARK_SCALA_VERSION/" 1>&2
-    echo "You need to build Spark before running $1." 1>&2
+    echo "You need to run \"build/sbt tools/package\" before running $1." 1>&2
     exit 1
   fi
   CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"

bin/spark-submit

Lines changed: 9 additions & 1 deletion
@@ -38,11 +38,19 @@ while (($#)); do
     export SPARK_SUBMIT_CLASSPATH=$2
   elif [ "$1" = "--driver-java-options" ]; then
     export SPARK_SUBMIT_OPTS=$2
+  elif [ "$1" = "--master" ]; then
+    export MASTER=$2
   fi
   shift
 done

-DEFAULT_PROPERTIES_FILE="$SPARK_HOME/conf/spark-defaults.conf"
+if [ -z "$SPARK_CONF_DIR" ]; then
+  export SPARK_CONF_DIR="$SPARK_HOME/conf"
+fi
+DEFAULT_PROPERTIES_FILE="$SPARK_CONF_DIR/spark-defaults.conf"
+if [ "$MASTER" == "yarn-cluster" ]; then
+  SPARK_SUBMIT_DEPLOY_MODE=cluster
+fi

 export SPARK_SUBMIT_DEPLOY_MODE=${SPARK_SUBMIT_DEPLOY_MODE:-"client"}
 export SPARK_SUBMIT_PROPERTIES_FILE=${SPARK_SUBMIT_PROPERTIES_FILE:-"$DEFAULT_PROPERTIES_FILE"}
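
Net effect of this hunk: --master yarn-cluster now implies cluster deploy mode without an explicit --deploy-mode flag, and spark-defaults.conf is resolved under SPARK_CONF_DIR when that is set. A sketch (the config path, class, and jar names are placeholders); the same two behaviors are mirrored for Windows in spark-submit2.cmd below:

    # spark-defaults.conf is now read from $SPARK_CONF_DIR, not $SPARK_HOME/conf
    export SPARK_CONF_DIR=/etc/spark/conf
    # yarn-cluster implies cluster deploy mode
    ./bin/spark-submit --master yarn-cluster --class com.example.MyApp /path/to/my-app.jar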

bin/spark-submit2.cmd

Lines changed: 11 additions & 1 deletion
@@ -24,7 +24,11 @@ set ORIG_ARGS=%*

 rem Reset the values of all variables used
 set SPARK_SUBMIT_DEPLOY_MODE=client
-set SPARK_SUBMIT_PROPERTIES_FILE=%SPARK_HOME%\conf\spark-defaults.conf
+
+if not defined %SPARK_CONF_DIR% (
+  set SPARK_CONF_DIR=%SPARK_HOME%\conf
+)
+set SPARK_SUBMIT_PROPERTIES_FILE=%SPARK_CONF_DIR%\spark-defaults.conf
 set SPARK_SUBMIT_DRIVER_MEMORY=
 set SPARK_SUBMIT_LIBRARY_PATH=
 set SPARK_SUBMIT_CLASSPATH=
@@ -45,11 +49,17 @@ if [%1] == [] goto continue
   set SPARK_SUBMIT_CLASSPATH=%2
 ) else if [%1] == [--driver-java-options] (
   set SPARK_SUBMIT_OPTS=%2
+) else if [%1] == [--master] (
+  set MASTER=%2
 )
 shift
 goto loop
 :continue

+if [%MASTER%] == [yarn-cluster] (
+  set SPARK_SUBMIT_DEPLOY_MODE=cluster
+)
+
 rem For client mode, the driver will be launched in the same JVM that launches
 rem SparkSubmit, so we may need to read the properties file for any extra class
 rem paths, library paths, java options and memory early on. Otherwise, it will

core/pom.xml

Lines changed: 0 additions & 18 deletions
@@ -276,11 +276,6 @@
       <artifactId>selenium-java</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
@@ -326,19 +321,6 @@
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>
-      <plugin>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>test</id>
-            <goals>
-              <goal>test</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-
      <!-- Unzip py4j so we can include its files in the jar -->
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
core/src/main/java/org/apache/spark/JavaSparkListener.java (new file; the filename was hidden in the page extract and is inferred from the package and class declared below)

Lines changed: 97 additions & 0 deletions
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark;
+
+import org.apache.spark.scheduler.SparkListener;
+import org.apache.spark.scheduler.SparkListenerApplicationEnd;
+import org.apache.spark.scheduler.SparkListenerApplicationStart;
+import org.apache.spark.scheduler.SparkListenerBlockManagerAdded;
+import org.apache.spark.scheduler.SparkListenerBlockManagerRemoved;
+import org.apache.spark.scheduler.SparkListenerEnvironmentUpdate;
+import org.apache.spark.scheduler.SparkListenerExecutorAdded;
+import org.apache.spark.scheduler.SparkListenerExecutorMetricsUpdate;
+import org.apache.spark.scheduler.SparkListenerExecutorRemoved;
+import org.apache.spark.scheduler.SparkListenerJobEnd;
+import org.apache.spark.scheduler.SparkListenerJobStart;
+import org.apache.spark.scheduler.SparkListenerStageCompleted;
+import org.apache.spark.scheduler.SparkListenerStageSubmitted;
+import org.apache.spark.scheduler.SparkListenerTaskEnd;
+import org.apache.spark.scheduler.SparkListenerTaskGettingResult;
+import org.apache.spark.scheduler.SparkListenerTaskStart;
+import org.apache.spark.scheduler.SparkListenerUnpersistRDD;
+
+/**
+ * Java clients should extend this class instead of implementing
+ * SparkListener directly. This is to prevent java clients
+ * from breaking when new events are added to the SparkListener
+ * trait.
+ *
+ * This is a concrete class instead of abstract to enforce
+ * new events get added to both the SparkListener and this adapter
+ * in lockstep.
+ */
+public class JavaSparkListener implements SparkListener {
+
+  @Override
+  public void onStageCompleted(SparkListenerStageCompleted stageCompleted) { }
+
+  @Override
+  public void onStageSubmitted(SparkListenerStageSubmitted stageSubmitted) { }
+
+  @Override
+  public void onTaskStart(SparkListenerTaskStart taskStart) { }
+
+  @Override
+  public void onTaskGettingResult(SparkListenerTaskGettingResult taskGettingResult) { }
+
+  @Override
+  public void onTaskEnd(SparkListenerTaskEnd taskEnd) { }
+
+  @Override
+  public void onJobStart(SparkListenerJobStart jobStart) { }
+
+  @Override
+  public void onJobEnd(SparkListenerJobEnd jobEnd) { }
+
+  @Override
+  public void onEnvironmentUpdate(SparkListenerEnvironmentUpdate environmentUpdate) { }
+
+  @Override
+  public void onBlockManagerAdded(SparkListenerBlockManagerAdded blockManagerAdded) { }
+
+  @Override
+  public void onBlockManagerRemoved(SparkListenerBlockManagerRemoved blockManagerRemoved) { }
+
+  @Override
+  public void onUnpersistRDD(SparkListenerUnpersistRDD unpersistRDD) { }
+
+  @Override
+  public void onApplicationStart(SparkListenerApplicationStart applicationStart) { }
+
+  @Override
+  public void onApplicationEnd(SparkListenerApplicationEnd applicationEnd) { }
+
+  @Override
+  public void onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate executorMetricsUpdate) { }
+
+  @Override
+  public void onExecutorAdded(SparkListenerExecutorAdded executorAdded) { }
+
+  @Override
+  public void onExecutorRemoved(SparkListenerExecutorRemoved executorRemoved) { }
+}
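
To illustrate the adapter's intended use: a Java client extends JavaSparkListener and overrides only the callbacks it cares about, staying source-compatible when new events are added to the SparkListener trait. A minimal sketch; the listener class, counting logic, and registration below are illustrative, not part of this commit:

    import org.apache.spark.JavaSparkListener;
    import org.apache.spark.scheduler.SparkListenerTaskEnd;

    // Overrides a single event; the adapter supplies no-op
    // implementations for all other SparkListener callbacks.
    public class TaskCountingListener extends JavaSparkListener {
      private int tasksEnded = 0;

      @Override
      public void onTaskEnd(SparkListenerTaskEnd taskEnd) {
        tasksEnded++;
        System.out.println("Tasks completed so far: " + tasksEnded);
      }
    }

In driver code the listener would be registered with something like sc.addSparkListener(new TaskCountingListener()), where sc is the SparkContext.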
