
Commit 718afeb

Merge pull request apache#12 from apache/master
update
2 parents 6e643f8 + f5801e8 commit 718afeb

144 files changed (+3600, -856 lines)


.gitignore

Lines changed: 3 additions & 0 deletions
@@ -5,6 +5,7 @@
 *.ipr
 *.iml
 *.iws
+*.pyc
 .idea/
 .idea_modules/
 sbt/*.jar
@@ -49,6 +50,8 @@ dependency-reduced-pom.xml
 checkpoint
 derby.log
 dist/
+dev/create-release/*txt
+dev/create-release/*new
 spark-*-bin-*.tgz
 unit-tests.log
 /lib/

bin/beeline.cmd

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+set SPARK_HOME=%~dp0..
+cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 22 additions & 22 deletions
@@ -1630,28 +1630,28 @@ object SparkContext extends Logging {
   // following ones.
 
   @deprecated("Replaced by implicit objects in AccumulatorParam. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   object DoubleAccumulatorParam extends AccumulatorParam[Double] {
     def addInPlace(t1: Double, t2: Double): Double = t1 + t2
     def zero(initialValue: Double) = 0.0
   }
 
   @deprecated("Replaced by implicit objects in AccumulatorParam. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   object IntAccumulatorParam extends AccumulatorParam[Int] {
     def addInPlace(t1: Int, t2: Int): Int = t1 + t2
     def zero(initialValue: Int) = 0
   }
 
   @deprecated("Replaced by implicit objects in AccumulatorParam. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   object LongAccumulatorParam extends AccumulatorParam[Long] {
     def addInPlace(t1: Long, t2: Long) = t1 + t2
     def zero(initialValue: Long) = 0L
   }
 
   @deprecated("Replaced by implicit objects in AccumulatorParam. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   object FloatAccumulatorParam extends AccumulatorParam[Float] {
     def addInPlace(t1: Float, t2: Float) = t1 + t2
     def zero(initialValue: Float) = 0f
@@ -1662,34 +1662,34 @@ object SparkContext extends Logging {
   // and just call the corresponding functions in `object RDD`.
 
   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
-    "kept here only for backward compatibility.", "1.2.0")
+    "kept here only for backward compatibility.", "1.3.0")
   def rddToPairRDDFunctions[K, V](rdd: RDD[(K, V)])
      (implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null) = {
     RDD.rddToPairRDDFunctions(rdd)
   }
 
   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
-    "kept here only for backward compatibility.", "1.2.0")
+    "kept here only for backward compatibility.", "1.3.0")
   def rddToAsyncRDDActions[T: ClassTag](rdd: RDD[T]) = RDD.rddToAsyncRDDActions(rdd)
 
   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
-    "kept here only for backward compatibility.", "1.2.0")
+    "kept here only for backward compatibility.", "1.3.0")
   def rddToSequenceFileRDDFunctions[K <% Writable: ClassTag, V <% Writable: ClassTag](
       rdd: RDD[(K, V)]) =
     RDD.rddToSequenceFileRDDFunctions(rdd)
 
   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
-    "kept here only for backward compatibility.", "1.2.0")
+    "kept here only for backward compatibility.", "1.3.0")
   def rddToOrderedRDDFunctions[K : Ordering : ClassTag, V: ClassTag](
       rdd: RDD[(K, V)]) =
     RDD.rddToOrderedRDDFunctions(rdd)
 
   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
-    "kept here only for backward compatibility.", "1.2.0")
+    "kept here only for backward compatibility.", "1.3.0")
   def doubleRDDToDoubleRDDFunctions(rdd: RDD[Double]) = RDD.doubleRDDToDoubleRDDFunctions(rdd)
 
   @deprecated("Replaced by implicit functions in the RDD companion object. This is " +
-    "kept here only for backward compatibility.", "1.2.0")
+    "kept here only for backward compatibility.", "1.3.0")
   def numericRDDToDoubleRDDFunctions[T](rdd: RDD[T])(implicit num: Numeric[T]) =
     RDD.numericRDDToDoubleRDDFunctions(rdd)
 
@@ -1722,43 +1722,43 @@ object SparkContext extends Logging {
   // and just call the corresponding functions in `object WritableConverter`.
 
   @deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   def intWritableConverter(): WritableConverter[Int] =
     WritableConverter.intWritableConverter()
 
   @deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   def longWritableConverter(): WritableConverter[Long] =
     WritableConverter.longWritableConverter()
 
   @deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   def doubleWritableConverter(): WritableConverter[Double] =
     WritableConverter.doubleWritableConverter()
 
   @deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   def floatWritableConverter(): WritableConverter[Float] =
     WritableConverter.floatWritableConverter()
 
   @deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   def booleanWritableConverter(): WritableConverter[Boolean] =
     WritableConverter.booleanWritableConverter()
 
   @deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   def bytesWritableConverter(): WritableConverter[Array[Byte]] =
     WritableConverter.bytesWritableConverter()
 
   @deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
+    "backward compatibility.", "1.3.0")
   def stringWritableConverter(): WritableConverter[String] =
     WritableConverter.stringWritableConverter()
 
   @deprecated("Replaced by implicit functions in WritableConverter. This is kept here only for " +
-    "backward compatibility.", "1.2.0")
-  def writableWritableConverter[T <: Writable]() =
+    "backward compatibility.", "1.3.0")
+  def writableWritableConverter[T <: Writable](): WritableConverter[T] =
     WritableConverter.writableWritableConverter()
 
   /**
@@ -2017,15 +2017,15 @@ object WritableConverter {
     simpleWritableConverter[Boolean, BooleanWritable](_.get)
 
   implicit def bytesWritableConverter(): WritableConverter[Array[Byte]] = {
-    simpleWritableConverter[Array[Byte], BytesWritable](bw =>
+    simpleWritableConverter[Array[Byte], BytesWritable] { bw =>
       // getBytes method returns array which is longer then data to be returned
       Arrays.copyOfRange(bw.getBytes, 0, bw.getLength)
-    )
+    }
   }
 
   implicit def stringWritableConverter(): WritableConverter[String] =
     simpleWritableConverter[String, Text](_.toString)
 
-  implicit def writableWritableConverter[T <: Writable]() =
+  implicit def writableWritableConverter[T <: Writable](): WritableConverter[T] =
     new WritableConverter[T](_.runtimeClass.asInstanceOf[Class[T]], _.asInstanceOf[T])
 }
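
Note: this hunk corrects the deprecation version on the compatibility shims from 1.2.0 to 1.3.0 (the deprecation messages point callers at the implicits on the RDD companion object and on WritableConverter) and gives writableWritableConverter an explicit return type instead of an inferred one. A minimal sketch of what that migration means for user code, assuming Spark 1.3+ where the pair-RDD implicits are picked up automatically; the object and app names are illustrative:

import org.apache.spark.{SparkConf, SparkContext}

object ImplicitsSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("implicits-sketch").setMaster("local[2]"))
    // No `import org.apache.spark.SparkContext._` needed here: the conversion to
    // PairRDDFunctions is now supplied by implicits in the RDD companion object.
    val counts = sc.parallelize(Seq("a" -> 1, "b" -> 2, "a" -> 3)).reduceByKey(_ + _)
    counts.collect().foreach(println)
    sc.stop()
  }
}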

core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala

Lines changed: 10 additions & 8 deletions
@@ -28,7 +28,6 @@ import com.google.common.base.Optional
 import org.apache.hadoop.io.compress.CompressionCodec
 
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.api.java.JavaPairRDD._
 import org.apache.spark.api.java.JavaSparkContext.fakeClassTag
@@ -212,8 +211,9 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
    * Return an RDD of grouped elements. Each group consists of a key and a sequence of elements
    * mapping to that key.
    */
-  def groupBy[K](f: JFunction[T, K]): JavaPairRDD[K, JIterable[T]] = {
-    implicit val ctagK: ClassTag[K] = fakeClassTag
+  def groupBy[U](f: JFunction[T, U]): JavaPairRDD[U, JIterable[T]] = {
+    // The type parameter is U instead of K in order to work around a compiler bug; see SPARK-4459
+    implicit val ctagK: ClassTag[U] = fakeClassTag
     implicit val ctagV: ClassTag[JList[T]] = fakeClassTag
     JavaPairRDD.fromRDD(groupByResultToJava(rdd.groupBy(f)(fakeClassTag)))
   }
@@ -222,10 +222,11 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
    * Return an RDD of grouped elements. Each group consists of a key and a sequence of elements
    * mapping to that key.
    */
-  def groupBy[K](f: JFunction[T, K], numPartitions: Int): JavaPairRDD[K, JIterable[T]] = {
-    implicit val ctagK: ClassTag[K] = fakeClassTag
+  def groupBy[U](f: JFunction[T, U], numPartitions: Int): JavaPairRDD[U, JIterable[T]] = {
+    // The type parameter is U instead of K in order to work around a compiler bug; see SPARK-4459
+    implicit val ctagK: ClassTag[U] = fakeClassTag
     implicit val ctagV: ClassTag[JList[T]] = fakeClassTag
-    JavaPairRDD.fromRDD(groupByResultToJava(rdd.groupBy(f, numPartitions)(fakeClassTag[K])))
+    JavaPairRDD.fromRDD(groupByResultToJava(rdd.groupBy(f, numPartitions)(fakeClassTag[U])))
   }
 
   /**
@@ -459,8 +460,9 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
   /**
    * Creates tuples of the elements in this RDD by applying `f`.
    */
-  def keyBy[K](f: JFunction[T, K]): JavaPairRDD[K, T] = {
-    implicit val ctag: ClassTag[K] = fakeClassTag
+  def keyBy[U](f: JFunction[T, U]): JavaPairRDD[U, T] = {
+    // The type parameter is U instead of K in order to work around a compiler bug; see SPARK-4459
+    implicit val ctag: ClassTag[U] = fakeClassTag
     JavaPairRDD.fromRDD(rdd.keyBy(f))
   }
 
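
Note: renaming the method type parameter from K to U (the workaround for the compiler bug tracked as SPARK-4459) does not affect call sites, since explicit type arguments are positional. A rough, hedged sketch of the ClassTag trick these wrappers lean on, assuming fakeClassTag is essentially an AnyRef tag cast to the requested type (its exact definition in JavaSparkContext is not shown in this diff); groupKeys and the sample data are illustrative only:

import scala.reflect.ClassTag

object FakeClassTagSketch {
  // Assumed shape of JavaSparkContext.fakeClassTag: an AnyRef ClassTag cast to the
  // requested type, used because Java callers cannot supply ClassTags themselves.
  def fakeClassTag[T]: ClassTag[T] = ClassTag.AnyRef.asInstanceOf[ClassTag[T]]

  // Mirrors the renamed wrappers: the type parameter is called U only to sidestep
  // the compiler bug; callers never see the name.
  def groupKeys[T, U](xs: Seq[T])(f: T => U): Map[U, Seq[T]] = {
    implicit val ctagK: ClassTag[U] = fakeClassTag
    xs.groupBy(f)
  }

  def main(args: Array[String]): Unit =
    println(groupKeys(Seq("apple", "avocado", "banana"))(_.charAt(0)))
}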

core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala

Lines changed: 0 additions & 1 deletion
@@ -34,7 +34,6 @@ import org.apache.hadoop.io.compress.CompressionCodec
 import org.apache.hadoop.mapred.{InputFormat, OutputFormat, JobConf}
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, OutputFormat => NewOutputFormat}
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
 import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.rdd.RDD

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 5 additions & 0 deletions
@@ -281,6 +281,11 @@ object SparkSubmit {
      sysProps.getOrElseUpdate(k, v)
    }
 
+    // Ignore invalid spark.driver.host in cluster modes.
+    if (deployMode == CLUSTER) {
+      sysProps -= ("spark.driver.host")
+    }
+
    // Resolve paths in certain spark properties
    val pathConfigs = Seq(
      "spark.jars",

core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala

Lines changed: 1 addition & 0 deletions
@@ -134,6 +134,7 @@ private[spark] class AppClient(
        val fullId = appId + "/" + id
        logInfo("Executor added: %s on %s (%s) with %d cores".format(fullId, workerId, hostPort,
          cores))
+        master ! ExecutorStateChanged(appId, id, ExecutorState.RUNNING, None, None)
        listener.executorAdded(fullId, workerId, hostPort, cores, memory)
 
      case ExecutorUpdated(id, state, message, exitStatus) =>

core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala

Lines changed: 0 additions & 2 deletions
@@ -144,8 +144,6 @@ private[spark] class ExecutorRunner(
      Files.write(header, stderr, UTF_8)
      stderrAppender = FileAppender(process.getErrorStream, stderr, conf)
 
-      state = ExecutorState.RUNNING
-      worker ! ExecutorStateChanged(appId, execId, state, None, None)
      // Wait for it to exit; executor may exit with code 0 (when driver instructs it to shutdown)
      // or with nonzero exit code
      val exitCode = process.waitFor()
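
Note: taken together with the AppClient hunk above, these two changes move the RUNNING notification: the worker-side ExecutorRunner no longer reports the transition itself; instead the driver-side AppClient tells the master the executor is RUNNING once ExecutorAdded arrives, so the master hears about it through a single path. A hedged sketch of the message shape as it appears in both hunks (the enum values and field names are minimal stand-ins for the real types under org.apache.spark.deploy):

object ExecutorStateSketch {
  // Minimal stand-in for the real ExecutorState enumeration.
  object ExecutorState extends Enumeration { val LAUNCHING, RUNNING, FAILED, EXITED = Value }

  // Assumed shape of the message exchanged in the two hunks above.
  case class ExecutorStateChanged(
      appId: String,
      execId: Int,
      state: ExecutorState.Value,
      message: Option[String],
      exitStatus: Option[Int])

  // After this commit the driver-side AppClient emits this on ExecutorAdded,
  // instead of the worker-side ExecutorRunner emitting it after spawning the process.
  val running = ExecutorStateChanged("app-20141216-0001", 0, ExecutorState.RUNNING, None, None)
}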

core/src/main/scala/org/apache/spark/package.scala

Lines changed: 2 additions & 2 deletions
@@ -27,8 +27,8 @@ package org.apache
  * contains operations available only on RDDs of Doubles; and
  * [[org.apache.spark.rdd.SequenceFileRDDFunctions]] contains operations available on RDDs that can
  * be saved as SequenceFiles. These operations are automatically available on any RDD of the right
- * type (e.g. RDD[(Int, Int)] through implicit conversions when you
- * `import org.apache.spark.SparkContext._`.
+ * type (e.g. RDD[(Int, Int)] through implicit conversions except `saveAsSequenceFile`. You need to
+ * `import org.apache.spark.SparkContext._` to make `saveAsSequenceFile` work.
  *
  * Java programmers should reference the [[org.apache.spark.api.java]] package
  * for Spark programming APIs in Java.
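
Note: the revised scaladoc states that only saveAsSequenceFile still requires the legacy import. A hedged usage sketch of that split, assuming Spark 1.3 behavior as described in the doc change; the app name and output path are illustrative:

import org.apache.spark.{SparkConf, SparkContext}
// Still required for `saveAsSequenceFile`, per the revised scaladoc; the other
// RDD conversions no longer need it.
import org.apache.spark.SparkContext._

object SequenceFileSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("seqfile-sketch").setMaster("local[2]"))
    val pairs = sc.parallelize(Seq(1 -> 10, 2 -> 20))
    val summed = pairs.reduceByKey(_ + _)           // available without SparkContext._
    summed.saveAsSequenceFile("/tmp/pairs-seqfile") // relies on SparkContext._; path is illustrative
    sc.stop()
  }
}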

core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala

Lines changed: 0 additions & 1 deletion
@@ -27,7 +27,6 @@ import org.apache.spark.{ComplexFutureAction, FutureAction, Logging}
 
 /**
  * A set of asynchronous RDD actions available through an implicit conversion.
- * Import `org.apache.spark.SparkContext._` at the top of your program to use these functions.
  */
 class AsyncRDDActions[T: ClassTag](self: RDD[T]) extends Serializable with Logging {
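
Note: the removed sentence reflects the same implicit migration; the async actions are now reachable without the legacy import. A small hedged usage sketch (app name and sizes are illustrative):

import scala.concurrent.Await
import scala.concurrent.duration._

import org.apache.spark.{SparkConf, SparkContext}

object AsyncSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("async-sketch").setMaster("local[2]"))
    // countAsync comes from the implicit conversion to AsyncRDDActions, which no
    // longer requires `import org.apache.spark.SparkContext._`.
    val futureCount = sc.parallelize(1 to 1000).countAsync()
    println(Await.result(futureCount, 30.seconds)) // prints 1000
    sc.stop()
  }
}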
