
Commit ffe272d

Revert "SPARK-1099:Spark's local mode should probably respect spark.cores.max by default"
This reverts commit 1678931. Jenkins was not run for this PR.
1 parent 1678931 commit ffe272d

3 files changed (+6, -22 lines)

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 1 addition & 4 deletions
@@ -1262,10 +1262,7 @@ object SparkContext extends Logging {
     master match {
       case "local" =>
         val scheduler = new TaskSchedulerImpl(sc, MAX_LOCAL_TASK_FAILURES, isLocal = true)
-        // Use user specified in config, up to all available cores
-        val realCores = Runtime.getRuntime.availableProcessors()
-        val toUseCores = math.min(sc.conf.getInt("spark.cores.max", realCores), realCores)
-        val backend = new LocalBackend(scheduler, toUseCores)
+        val backend = new LocalBackend(scheduler, 1)
         scheduler.initialize(backend)
         scheduler

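
For context, a minimal driver-side sketch of what the restored behavior means (not part of this commit; the app name and config values are illustrative): with the plain "local" master, LocalBackend gets exactly one core and spark.cores.max is ignored, so additional cores have to be requested through the master string.

import org.apache.spark.{SparkConf, SparkContext}

object LocalModeSketch {
  def main(args: Array[String]): Unit = {
    // With this revert in place, the plain "local" master always gets one core;
    // spark.cores.max no longer influences it.
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("local-mode-sketch")  // hypothetical app name
      .set("spark.cores.max", "8")      // has no effect on the "local" master after the revert
    val sc = new SparkContext(conf)
    println(sc.defaultParallelism)      // prints 1 here (unless spark.default.parallelism is set)

    // To run local tasks on several cores, encode the count in the master string:
    //   new SparkConf().setMaster("local[4]")   // four worker threads
    sc.stop()
  }
}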

core/src/test/scala/org/apache/spark/FileSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -34,7 +34,7 @@ import org.apache.spark.SparkContext._
 class FileSuite extends FunSuite with LocalSparkContext {

   test("text files") {
-    sc = new SparkContext("local[1]", "test")
+    sc = new SparkContext("local", "test")
     val tempDir = Files.createTempDir()
     val outputDir = new File(tempDir, "output").getAbsolutePath
     val nums = sc.makeRDD(1 to 4)
@@ -176,7 +176,7 @@ class FileSuite extends FunSuite with LocalSparkContext {

   test("write SequenceFile using new Hadoop API") {
     import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat
-    sc = new SparkContext("local[1]", "test")
+    sc = new SparkContext("local", "test")
     val tempDir = Files.createTempDir()
     val outputDir = new File(tempDir, "output").getAbsolutePath
     val nums = sc.makeRDD(1 to 3).map(x => (new IntWritable(x), new Text("a" * x)))
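
The two test edits above only swap "local[1]" back to the plain "local" master, which is equivalent once "local" is single-core again. A condensed, hypothetical sketch of the round trip these tests exercise, assuming ScalaTest's FunSuite and Guava's Files are available as they are in FileSuite:

import java.io.File

import com.google.common.io.Files
import org.scalatest.FunSuite

import org.apache.spark.SparkContext

class TextFileRoundTripSketch extends FunSuite {
  test("text files round-trip in local mode") {
    // Plain "local" master: a single-core LocalBackend after the revert.
    val sc = new SparkContext("local", "test")
    try {
      val tempDir = Files.createTempDir()
      val outputDir = new File(tempDir, "output").getAbsolutePath
      sc.makeRDD(1 to 4).saveAsTextFile(outputDir)
      // saveAsTextFile writes one line per element; read them back as strings.
      assert(sc.textFile(outputDir).collect().toSet === Set("1", "2", "3", "4"))
    } finally {
      sc.stop()
    }
  }
}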

core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala

Lines changed: 3 additions & 16 deletions
@@ -27,10 +27,10 @@ import org.apache.spark.scheduler.local.LocalBackend
 class SparkContextSchedulerCreationSuite
   extends FunSuite with PrivateMethodTester with LocalSparkContext with Logging {

-  def createTaskScheduler(master: String, conf: SparkConf = new SparkConf()): TaskSchedulerImpl = {
+  def createTaskScheduler(master: String): TaskSchedulerImpl = {
     // Create local SparkContext to setup a SparkEnv. We don't actually want to start() the
     // real schedulers, so we don't want to create a full SparkContext with the desired scheduler.
-    sc = new SparkContext("local", "test", conf)
+    sc = new SparkContext("local", "test")
     val createTaskSchedulerMethod = PrivateMethod[TaskScheduler]('createTaskScheduler)
     val sched = SparkContext invokePrivate createTaskSchedulerMethod(sc, master)
     sched.asInstanceOf[TaskSchedulerImpl]
@@ -44,26 +44,13 @@ class SparkContextSchedulerCreationSuite
   }

   test("local") {
-    var conf = new SparkConf()
-    conf.set("spark.cores.max", "1")
-    val sched = createTaskScheduler("local", conf)
+    val sched = createTaskScheduler("local")
     sched.backend match {
       case s: LocalBackend => assert(s.totalCores === 1)
       case _ => fail()
     }
   }

-  test("local-cores-exceed") {
-    val cores = Runtime.getRuntime.availableProcessors() + 1
-    var conf = new SparkConf()
-    conf.set("spark.cores.max", cores.toString)
-    val sched = createTaskScheduler("local", conf)
-    sched.backend match {
-      case s: LocalBackend => assert(s.totalCores === Runtime.getRuntime.availableProcessors())
-      case _ => fail()
-    }
-  }
-
   test("local-n") {
     val sched = createTaskScheduler("local[5]")
     assert(sched.maxTaskFailures === 1)
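
Since spark.cores.max is no longer consulted in local mode, the master string is the only knob for the core count, which the surviving "local-n" test covers. A hedged sketch of an extra assertion in the same style (hypothetical test name, reusing the suite's createTaskScheduler helper and the LocalBackend match shown above):

  test("local-n sets totalCores from the master string") {
    // "local[3]" asks for three worker threads; spark.cores.max is not involved.
    val sched = createTaskScheduler("local[3]")
    sched.backend match {
      case s: LocalBackend => assert(s.totalCores === 3)
      case _ => fail()
    }
  }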
