
Commit 98b134f

Merge pull request #5 from apache/master

merge latest spark

2 parents: 161cae3 + 2c3f83c

203 files changed (+4247, -1523 lines)


README.md
Lines changed: 1 addition & 1 deletion

@@ -85,7 +85,7 @@ storage systems. Because the protocols have changed in different versions of
 Hadoop, you must build Spark against the same version that your cluster runs.
 
 Please refer to the build documentation at
-["Specifying the Hadoop Version"](http://spark.apache.org/docs/latest/building-with-maven.html#specifying-the-hadoop-version)
+["Specifying the Hadoop Version"](http://spark.apache.org/docs/latest/building-spark.html#specifying-the-hadoop-version)
 for detailed guidance on building for a particular distribution of Hadoop, including
 building for particular Hive and Hive Thriftserver distributions. See also
 ["Third Party Hadoop Distributions"](http://spark.apache.org/docs/latest/hadoop-third-party-distributions.html)

assembly/pom.xml
Lines changed: 0 additions & 10 deletions

@@ -114,16 +114,6 @@
         <exclude>META-INF/*.RSA</exclude>
       </excludes>
     </filter>
-    <filter>
-      <!-- Exclude libgfortran, libgcc for license issues -->
-      <artifact>org.jblas:jblas</artifact>
-      <excludes>
-        <!-- Linux amd64 is OK; not statically linked -->
-        <exclude>lib/static/Linux/i386/**</exclude>
-        <exclude>lib/static/Mac OS X/**</exclude>
-        <exclude>lib/static/Windows/**</exclude>
-      </excludes>
-    </filter>
   </filters>
 </configuration>
 <executions>

bin/pyspark
Lines changed: 0 additions & 1 deletion

@@ -89,7 +89,6 @@ export PYTHONSTARTUP="$SPARK_HOME/python/pyspark/shell.py"
 if [[ -n "$SPARK_TESTING" ]]; then
   unset YARN_CONF_DIR
   unset HADOOP_CONF_DIR
-  export PYSPARK_SUBMIT_ARGS=pyspark-shell
   if [[ -n "$PYSPARK_DOC_TEST" ]]; then
     exec "$PYSPARK_DRIVER_PYTHON" -m doctest $1
   else

conf/spark-env.sh.template
Lines changed: 1 addition & 1 deletion

@@ -15,7 +15,7 @@
 # - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program
 # - SPARK_CLASSPATH, default classpath entries to append
 # - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data
-# - MESOS_NATIVE_LIBRARY, to point to your libmesos.so if you use Mesos
+# - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos
 
 # Options read in YARN client mode
 # - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files

core/pom.xml
Lines changed: 1 addition & 1 deletion

@@ -414,7 +414,7 @@
   <overWriteIfNewer>true</overWriteIfNewer>
   <useSubDirectoryPerType>true</useSubDirectoryPerType>
   <includeArtifactIds>
-    guava,jetty-io,jetty-servlet,jetty-continuation,jetty-http,jetty-plus,jetty-util,jetty-server
+    guava,jetty-io,jetty-servlet,jetty-continuation,jetty-http,jetty-plus,jetty-util,jetty-server,jetty-security
   </includeArtifactIds>
   <silent>true</silent>
 </configuration>

core/src/main/scala/org/apache/spark/ContextCleaner.scala
Lines changed: 1 addition & 1 deletion

@@ -145,7 +145,7 @@ private[spark] class ContextCleaner(sc: SparkContext) extends Logging {
   }
 
   /** Keep cleaning RDD, shuffle, and broadcast state. */
-  private def keepCleaning(): Unit = Utils.logUncaughtExceptions {
+  private def keepCleaning(): Unit = Utils.tryOrStopSparkContext(sc) {
     while (!stopped) {
       try {
         val reference = Option(referenceQueue.remove(ContextCleaner.REF_QUEUE_POLL_TIMEOUT))
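This swaps Utils.logUncaughtExceptions for Utils.tryOrStopSparkContext(sc): if the cleaner thread dies, the SparkContext is stopped rather than the failure being merely logged while the application limps on. A minimal sketch of the idea, using stand-in names (the real Utils helper may differ in detail):

import scala.util.control.NonFatal

object CleanerSketch {
  // Stand-in for the one part of SparkContext the sketch needs.
  trait StoppableContext { def stop(): Unit }

  // Run a block and, if it dies with an uncaught exception, stop the
  // context so the failure surfaces instead of only being logged.
  def tryOrStopSparkContext(sc: StoppableContext)(block: => Unit): Unit = {
    try block
    catch {
      case NonFatal(t) =>
        System.err.println(s"Uncaught exception in ${Thread.currentThread().getName}: $t")
        sc.stop()
    }
  }
}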

core/src/main/scala/org/apache/spark/SparkContext.scala
Lines changed: 2 additions & 2 deletions

@@ -1093,7 +1093,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   def addFile(path: String, recursive: Boolean): Unit = {
     val uri = new URI(path)
     val schemeCorrectedPath = uri.getScheme match {
-      case null | "local" => "file:" + uri.getPath
+      case null | "local" => new File(path).getCanonicalFile.toURI.toString
       case _ => path
     }
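The old branch built the URI by string concatenation, so a relative path stayed relative: "file:" + uri.getPath on "data/input.txt" yields file:data/input.txt, which has no absolute path component. Going through java.io.File first resolves the path against the working directory and normalizes "." and ".." segments. A small illustration with a hypothetical path:

import java.io.File
import java.net.URI

object AddFilePathSketch extends App {
  val path = "data/../data/input.txt" // hypothetical relative path

  // Old behavior: URI.getPath keeps a relative path relative.
  println("file:" + new URI(path).getPath)
  // -> file:data/../data/input.txt  (no absolute path component)

  // New behavior: resolve against the working directory and normalize "..".
  println(new File(path).getCanonicalFile.toURI)
  // -> file:/<cwd>/data/input.txt  (absolute; the prefix depends on the CWD)
}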

@@ -1736,7 +1736,7 @@
     }
   }
 
-  listenerBus.start()
+  listenerBus.start(this)
 }
 
 /** Post the application start event */
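Passing the context into start() fits the same pattern as the ContextCleaner change above: the listener bus gets a handle it can stop if its event-dispatch thread fails. A rough sketch of that shape, with hypothetical names (the commit does not show the bus internals):

object ListenerBusSketch {
  trait StoppableContext { def stop(): Unit }

  class ListenerBus {
    // The context arrives at start() so the dispatch thread can stop it
    // on an uncaught error, rather than being fixed at construction time.
    def start(sc: StoppableContext): Unit = {
      val dispatcher = new Thread("listener-bus-sketch") {
        override def run(): Unit =
          try {
            // ... poll the event queue and deliver events to listeners ...
          } catch {
            case t: Throwable => sc.stop()
          }
      }
      dispatcher.setDaemon(true)
      dispatcher.start()
    }
  }
}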

core/src/main/scala/org/apache/spark/TaskState.scala
Lines changed: 1 addition & 0 deletions

@@ -46,5 +46,6 @@ private[spark] object TaskState extends Enumeration {
     case MesosTaskState.TASK_FAILED => FAILED
     case MesosTaskState.TASK_KILLED => KILLED
     case MesosTaskState.TASK_LOST => LOST
+    case MesosTaskState.TASK_ERROR => LOST
   }
 }
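The match shown here has no wildcard case, so before this addition a Mesos TASK_ERROR would have escaped as a scala.MatchError and killed the receiving thread; mapping it to LOST keeps the driver alive. A self-contained sketch with stand-in enumerations:

object TaskStateMappingSketch extends App {
  // Stand-ins: External mimics the Mesos enum, Internal mimics Spark's TaskState.
  object External extends Enumeration { val TASK_FAILED, TASK_KILLED, TASK_LOST, TASK_ERROR = Value }
  object Internal extends Enumeration { val FAILED, KILLED, LOST = Value }

  def fromExternal(s: External.Value): Internal.Value = s match {
    case External.TASK_FAILED => Internal.FAILED
    case External.TASK_KILLED => Internal.KILLED
    case External.TASK_LOST   => Internal.LOST
    case External.TASK_ERROR  => Internal.LOST // without this case, TASK_ERROR throws scala.MatchError
  }

  println(fromExternal(External.TASK_ERROR)) // LOST
}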

core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
Lines changed: 2 additions & 1 deletion

@@ -32,7 +32,8 @@ import org.apache.spark.storage.StorageLevel
 import org.apache.spark.util.StatCounter
 import org.apache.spark.util.Utils
 
-class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[JDouble, JavaDoubleRDD] {
+class JavaDoubleRDD(val srdd: RDD[scala.Double])
+  extends AbstractJavaRDDLike[JDouble, JavaDoubleRDD] {
 
   override val classTag: ClassTag[JDouble] = implicitly[ClassTag[JDouble]]
 
core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
Lines changed: 1 addition & 1 deletion

@@ -44,7 +44,7 @@ import org.apache.spark.util.Utils
 
 class JavaPairRDD[K, V](val rdd: RDD[(K, V)])
   (implicit val kClassTag: ClassTag[K], implicit val vClassTag: ClassTag[V])
-  extends JavaRDDLike[(K, V), JavaPairRDD[K, V]] {
+  extends AbstractJavaRDDLike[(K, V), JavaPairRDD[K, V]] {
 
   override def wrapRDD(rdd: RDD[(K, V)]): JavaPairRDD[K, V] = JavaPairRDD.fromRDD(rdd)
 
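Both JavaDoubleRDD and JavaPairRDD now extend AbstractJavaRDDLike instead of the JavaRDDLike trait directly. Inserting an abstract class between a generic Scala trait and its concrete subclasses is a common Java-interop workaround: it gives Java source a plain class to compile against when javac has trouble resolving methods inherited from the trait. The rationale here is assumed, and the names in the sketch below are simplified stand-ins:

trait RDDLikeSketch[T, This <: RDDLikeSketch[T, This]] {
  def first(): T
  def name: String = "rdd" // concrete trait method: the kind javac may mis-resolve
}

// This class exists only to sit between the trait and its subclasses,
// giving Java callers a regular class in the hierarchy.
abstract class AbstractRDDLikeSketch[T, This <: RDDLikeSketch[T, This]]
  extends RDDLikeSketch[T, This]

class DoubleRDDSketch(values: Seq[Double])
  extends AbstractRDDLikeSketch[Double, DoubleRDDSketch] {
  override def first(): Double = values.head
}

object RDDLikeSketchDemo extends App {
  println(new DoubleRDDSketch(Seq(1.0, 2.0)).first()) // 1.0
}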
