Commit d00303b

Merge pull request apache#6 from apache/master

merge latest spark

2 parents: 98b134f + 1afcf77

364 files changed: +3,063 additions, -1,650 deletions (large commit; only a subset of the changed files is shown below)

assembly/pom.xml

Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.10</artifactId>
-    <version>1.3.0-SNAPSHOT</version>
+    <version>1.4.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

bagel/pom.xml

Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.10</artifactId>
-    <version>1.3.0-SNAPSHOT</version>
+    <version>1.4.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

core/pom.xml

Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.10</artifactId>
-    <version>1.3.0-SNAPSHOT</version>
+    <version>1.4.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
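
The three one-line changes above are the same edit: each module's parent version moves from 1.3.0-SNAPSHOT to 1.4.0-SNAPSHOT, the routine bump to the next development iteration after a release branch is cut. As a hedged aside (not part of this commit), a bump like this is typically applied to all modules in one pass with the versions-maven-plugin rather than by editing each pom.xml by hand:

    # Rewrites the version in the parent POM and the child modules in one pass.
    mvn versions:set -DnewVersion=1.4.0-SNAPSHOT -DgenerateBackupPoms=false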

core/src/main/scala/org/apache/spark/Accumulators.scala

Lines changed: 11 additions & 12 deletions

@@ -18,8 +18,6 @@
 package org.apache.spark

 import java.io.{ObjectInputStream, Serializable}
-import java.util.concurrent.atomic.AtomicLong
-import java.lang.ThreadLocal

 import scala.collection.generic.Growable
 import scala.collection.mutable.Map
@@ -109,7 +107,7 @@ class Accumulable[R, T] (
    * The typical use of this method is to directly mutate the local value, eg., to add
    * an element to a Set.
    */
-  def localValue = value_
+  def localValue: R = value_

   /**
    * Set the accumulator's value; only allowed on master.
@@ -137,7 +135,7 @@
     Accumulators.register(this, false)
   }

-  override def toString = if (value_ == null) "null" else value_.toString
+  override def toString: String = if (value_ == null) "null" else value_.toString
 }

 /**
@@ -257,22 +255,22 @@ object AccumulatorParam {

   implicit object DoubleAccumulatorParam extends AccumulatorParam[Double] {
     def addInPlace(t1: Double, t2: Double): Double = t1 + t2
-    def zero(initialValue: Double) = 0.0
+    def zero(initialValue: Double): Double = 0.0
   }

   implicit object IntAccumulatorParam extends AccumulatorParam[Int] {
     def addInPlace(t1: Int, t2: Int): Int = t1 + t2
-    def zero(initialValue: Int) = 0
+    def zero(initialValue: Int): Int = 0
   }

   implicit object LongAccumulatorParam extends AccumulatorParam[Long] {
-    def addInPlace(t1: Long, t2: Long) = t1 + t2
-    def zero(initialValue: Long) = 0L
+    def addInPlace(t1: Long, t2: Long): Long = t1 + t2
+    def zero(initialValue: Long): Long = 0L
   }

   implicit object FloatAccumulatorParam extends AccumulatorParam[Float] {
-    def addInPlace(t1: Float, t2: Float) = t1 + t2
-    def zero(initialValue: Float) = 0f
+    def addInPlace(t1: Float, t2: Float): Float = t1 + t2
+    def zero(initialValue: Float): Float = 0f
   }

   // TODO: Add AccumulatorParams for other types, e.g. lists and strings
@@ -351,6 +349,7 @@ private[spark] object Accumulators extends Logging {
     }
   }

-  def stringifyPartialValue(partialValue: Any) = "%s".format(partialValue)
-  def stringifyValue(value: Any) = "%s".format(value)
+  def stringifyPartialValue(partialValue: Any): String = "%s".format(partialValue)
+
+  def stringifyValue(value: Any): String = "%s".format(value)
 }
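
Every change in this file is the same refactor: public members that relied on type inference (def localValue = value_) now declare their result types (def localValue: R = value_). Explicit types on public API members keep the published signature from drifting silently when an implementation changes, which matters for binary-compatibility checks. A minimal sketch of the idea, using a hypothetical class rather than Spark code:

    class Counter(private var n: Int) {
      // Without the annotation, the result type is whatever the body infers;
      // change the body and the public signature can change with it.
      def value: Int = n

      // Overrides get explicit types too, pinning the inherited signature.
      override def toString: String = s"Counter($n)"
    }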

core/src/main/scala/org/apache/spark/Dependency.scala

Lines changed: 3 additions & 3 deletions

@@ -74,7 +74,7 @@ class ShuffleDependency[K, V, C](
     val mapSideCombine: Boolean = false)
   extends Dependency[Product2[K, V]] {

-  override def rdd = _rdd.asInstanceOf[RDD[Product2[K, V]]]
+  override def rdd: RDD[Product2[K, V]] = _rdd.asInstanceOf[RDD[Product2[K, V]]]

   val shuffleId: Int = _rdd.context.newShuffleId()

@@ -91,7 +91,7 @@ class ShuffleDependency[K, V, C](
  */
 @DeveloperApi
 class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
-  override def getParents(partitionId: Int) = List(partitionId)
+  override def getParents(partitionId: Int): List[Int] = List(partitionId)
 }


@@ -107,7 +107,7 @@ class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
 class RangeDependency[T](rdd: RDD[T], inStart: Int, outStart: Int, length: Int)
   extends NarrowDependency[T](rdd) {

-  override def getParents(partitionId: Int) = {
+  override def getParents(partitionId: Int): List[Int] = {
     if (partitionId >= outStart && partitionId < outStart + length) {
       List(partitionId - outStart + inStart)
     } else {
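
The RangeDependency hunk also shows what the method computes: child partitions in [outStart, outStart + length) map one-to-one onto parent partitions starting at inStart, and any other partition has no parent. A standalone restatement of that arithmetic (the same logic as the diff, lifted out of the class):

    def getParents(partitionId: Int, inStart: Int, outStart: Int, length: Int): List[Int] =
      if (partitionId >= outStart && partitionId < outStart + length) {
        List(partitionId - outStart + inStart)
      } else {
        Nil
      }

    // Example: with inStart = 0, outStart = 3, length = 4,
    // child partition 5 reads from parent partition 5 - 3 + 0 = 2.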

core/src/main/scala/org/apache/spark/FutureAction.scala

Lines changed: 2 additions & 2 deletions

@@ -168,7 +168,7 @@ class SimpleFutureAction[T] private[spark](jobWaiter: JobWaiter[_], resultFunc:
     }
   }

-  def jobIds = Seq(jobWaiter.jobId)
+  def jobIds: Seq[Int] = Seq(jobWaiter.jobId)
 }


@@ -276,7 +276,7 @@ class ComplexFutureAction[T] extends FutureAction[T] {

   override def value: Option[Try[T]] = p.future.value

-  def jobIds = jobs
+  def jobIds: Seq[Int] = jobs

 }

core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala

Lines changed: 1 addition & 1 deletion

@@ -65,7 +65,7 @@ private[spark] class HeartbeatReceiver(sc: SparkContext, scheduler: TaskSchedule
     super.preStart()
   }

-  override def receiveWithLogging = {
+  override def receiveWithLogging: PartialFunction[Any, Unit] = {
     case Heartbeat(executorId, taskMetrics, blockManagerId) =>
       val unknownExecutor = !scheduler.executorHeartbeatReceived(
         executorId, taskMetrics, blockManagerId)

core/src/main/scala/org/apache/spark/MapOutputTracker.scala

Lines changed: 1 addition & 1 deletion

@@ -43,7 +43,7 @@ private[spark] class MapOutputTrackerMasterActor(tracker: MapOutputTrackerMaster
   extends Actor with ActorLogReceive with Logging {
   val maxAkkaFrameSize = AkkaUtils.maxFrameSizeBytes(conf)

-  override def receiveWithLogging = {
+  override def receiveWithLogging: PartialFunction[Any, Unit] = {
     case GetMapOutputStatuses(shuffleId: Int) =>
       val hostPort = sender.path.address.hostPort
       logInfo("Asked to send map output locations for shuffle " + shuffleId + " to " + hostPort)
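
The two receiveWithLogging overrides above (HeartbeatReceiver and MapOutputTrackerMasterActor) get the same annotation, and here it carries real information: a bare block of case clauses becomes a PartialFunction only when an expected type asks for one. Before this change that expected type came indirectly from the overridden member; the annotation now states it at the definition site. A small self-contained illustration, using a hypothetical trait rather than Spark's:

    trait Receiver {
      def receive: PartialFunction[Any, Unit]
    }

    class Echo extends Receiver {
      // The case block compiles to a PartialFunction because the declared
      // result type asks for one.
      override def receive: PartialFunction[Any, Unit] = {
        case msg: String => println("got: " + msg)
      }
    }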

core/src/main/scala/org/apache/spark/Partitioner.scala

Lines changed: 2 additions & 2 deletions

@@ -76,7 +76,7 @@ object Partitioner {
  * produce an unexpected or incorrect result.
  */
 class HashPartitioner(partitions: Int) extends Partitioner {
-  def numPartitions = partitions
+  def numPartitions: Int = partitions

   def getPartition(key: Any): Int = key match {
     case null => 0
@@ -154,7 +154,7 @@ class RangePartitioner[K : Ordering : ClassTag, V](
     }
   }

-  def numPartitions = rangeBounds.length + 1
+  def numPartitions: Int = rangeBounds.length + 1

   private var binarySearch: ((Array[K], K) => Int) = CollectionsUtils.makeBinarySearch[K]
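
Beyond the annotations, the HashPartitioner context above shows its contract: numPartitions reports the partition count and getPartition sends null keys to partition 0. A hedged sketch of the full scheme (a simplified stand-in, not the Spark source, which must also normalize negative hash codes):

    class SimpleHashPartitioner(val numPartitions: Int) {
      def getPartition(key: Any): Int = key match {
        case null => 0
        // hashCode can be negative; force the modulo into [0, numPartitions).
        case k => ((k.hashCode % numPartitions) + numPartitions) % numPartitions
      }
    }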

core/src/main/scala/org/apache/spark/SerializableWritable.scala

Lines changed: 4 additions & 2 deletions

@@ -28,8 +28,10 @@ import org.apache.spark.util.Utils

 @DeveloperApi
 class SerializableWritable[T <: Writable](@transient var t: T) extends Serializable {
-  def value = t
-  override def toString = t.toString
+
+  def value: T = t
+
+  override def toString: String = t.toString

   private def writeObject(out: ObjectOutputStream): Unit = Utils.tryOrIOException {
     out.defaultWriteObject()
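
The class above wraps a Hadoop Writable, which is not java.io.Serializable, so the field is marked @transient and the custom writeObject hook at the end of the diff serializes it manually. A simplified, hedged sketch of that wrapper pattern, with plain Java serialization standing in for the Writable protocol:

    import java.io.{ObjectInputStream, ObjectOutputStream}

    class SerializableBox[T](@transient var t: T) extends Serializable {
      def value: T = t

      private def writeObject(out: ObjectOutputStream): Unit = {
        out.defaultWriteObject()
        out.writeObject(t.asInstanceOf[AnyRef]) // assumes the runtime value is serializable
      }

      private def readObject(in: ObjectInputStream): Unit = {
        in.defaultReadObject()
        t = in.readObject().asInstanceOf[T]
      }
    }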
