Skip to content

Commit 8cb16a6

Browse files
committed
Merge remote-tracking branch 'upstream/master' into ldaonline
2 parents 62405cc + 2bf40c5 commit 8cb16a6

File tree

208 files changed

+1965
-918
lines changed

Some content is hidden

Large commits have some of their content hidden by default. Use the search box below to find content that may be hidden.

208 files changed

+1965
-918
lines changed

assembly/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
<parent>
2222
<groupId>org.apache.spark</groupId>
2323
<artifactId>spark-parent_2.10</artifactId>
24-
<version>1.3.0-SNAPSHOT</version>
24+
<version>1.4.0-SNAPSHOT</version>
2525
<relativePath>../pom.xml</relativePath>
2626
</parent>
2727

bagel/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
<parent>
2222
<groupId>org.apache.spark</groupId>
2323
<artifactId>spark-parent_2.10</artifactId>
24-
<version>1.3.0-SNAPSHOT</version>
24+
<version>1.4.0-SNAPSHOT</version>
2525
<relativePath>../pom.xml</relativePath>
2626
</parent>
2727

core/pom.xml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
<parent>
2222
<groupId>org.apache.spark</groupId>
2323
<artifactId>spark-parent_2.10</artifactId>
24-
<version>1.3.0-SNAPSHOT</version>
24+
<version>1.4.0-SNAPSHOT</version>
2525
<relativePath>../pom.xml</relativePath>
2626
</parent>
2727

@@ -275,7 +275,7 @@
275275
<dependency>
276276
<groupId>org.tachyonproject</groupId>
277277
<artifactId>tachyon-client</artifactId>
278-
<version>0.5.0</version>
278+
<version>0.6.1</version>
279279
<exclusions>
280280
<exclusion>
281281
<groupId>org.apache.hadoop</groupId>

core/src/main/scala/org/apache/spark/TaskState.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@ private[spark] object TaskState extends Enumeration {
2727

2828
type TaskState = Value
2929

30+
def isFailed(state: TaskState) = (LOST == state) || (FAILED == state)
31+
3032
def isFinished(state: TaskState) = FINISHED_STATES.contains(state)
3133

3234
def toMesos(state: TaskState): MesosTaskState = state match {

core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -101,12 +101,23 @@ class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])
101101

102102
/**
103103
* Return a sampled subset of this RDD.
104+
*
105+
* @param withReplacement can elements be sampled multiple times (replaced when sampled out)
106+
* @param fraction expected size of the sample as a fraction of this RDD's size
107+
* without replacement: probability that each element is chosen; fraction must be [0, 1]
108+
* with replacement: expected number of times each element is chosen; fraction must be >= 0
104109
*/
105110
def sample(withReplacement: Boolean, fraction: Double): JavaRDD[T] =
106111
sample(withReplacement, fraction, Utils.random.nextLong)
107112

108113
/**
109114
* Return a sampled subset of this RDD.
115+
*
116+
* @param withReplacement can elements be sampled multiple times (replaced when sampled out)
117+
* @param fraction expected size of the sample as a fraction of this RDD's size
118+
* without replacement: probability that each element is chosen; fraction must be [0, 1]
119+
* with replacement: expected number of times each element is chosen; fraction must be >= 0
120+
* @param seed seed for the random number generator
110121
*/
111122
def sample(withReplacement: Boolean, fraction: Double, seed: Long): JavaRDD[T] =
112123
wrapRDD(rdd.sample(withReplacement, fraction, seed))

core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ import org.json4s.jackson.JsonMethods
3333

3434
import org.apache.spark.{Logging, SparkConf, SparkContext}
3535
import org.apache.spark.deploy.master.{RecoveryState, SparkCuratorUtil}
36+
import org.apache.spark.util.Utils
3637

3738
/**
3839
* This suite tests the fault tolerance of the Spark standalone scheduler, mainly the Master.
@@ -405,8 +406,7 @@ private object SparkDocker {
405406

406407
private def startNode(dockerCmd: ProcessBuilder) : (String, DockerId, File) = {
407408
val ipPromise = promise[String]()
408-
val outFile = File.createTempFile("fault-tolerance-test", "")
409-
outFile.deleteOnExit()
409+
val outFile = File.createTempFile("fault-tolerance-test", "", Utils.createTempDir())
410410
val outStream: FileWriter = new FileWriter(outFile)
411411
def findIpAndLog(line: String): Unit = {
412412
if (line.startsWith("CONTAINER_IP=")) {

core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,8 @@ private[history] class FsHistoryProvider(conf: SparkConf) extends ApplicationHis
233233
} catch {
234234
case e: Exception =>
235235
logError(
236-
s"Exception encountered when attempting to load application log ${fileStatus.getPath}")
236+
s"Exception encountered when attempting to load application log ${fileStatus.getPath}",
237+
e)
237238
None
238239
}
239240
}.toSeq.sortWith(compareAppInfo)

core/src/main/scala/org/apache/spark/package.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,5 +43,5 @@ package org.apache
4343

4444
package object spark {
4545
// For package docs only
46-
val SPARK_VERSION = "1.3.0-SNAPSHOT"
46+
val SPARK_VERSION = "1.4.0-SNAPSHOT"
4747
}

core/src/main/scala/org/apache/spark/rdd/RDD.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -377,6 +377,12 @@ abstract class RDD[T: ClassTag](
377377

378378
/**
379379
* Return a sampled subset of this RDD.
380+
*
381+
* @param withReplacement can elements be sampled multiple times (replaced when sampled out)
382+
* @param fraction expected size of the sample as a fraction of this RDD's size
383+
* without replacement: probability that each element is chosen; fraction must be [0, 1]
384+
* with replacement: expected number of times each element is chosen; fraction must be >= 0
385+
* @param seed seed for the random number generator
380386
*/
381387
def sample(withReplacement: Boolean,
382388
fraction: Double,

core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -277,7 +277,7 @@ private[spark] class CoarseMesosSchedulerBackend(
277277
coresByTaskId -= taskId
278278
}
279279
// If it was a failure, mark the slave as failed for blacklisting purposes
280-
if (state == MesosTaskState.TASK_FAILED || state == MesosTaskState.TASK_LOST) {
280+
if (TaskState.isFailed(TaskState.fromMesos(state))) {
281281
failuresBySlaveId(slaveId) = failuresBySlaveId.getOrElse(slaveId, 0) + 1
282282
if (failuresBySlaveId(slaveId) >= MAX_SLAVE_FAILURES) {
283283
logInfo("Blacklisting Mesos slave " + slaveId + " due to too many failures; " +

0 commit comments

Comments (0)