
Commit 66dd9cb

Robert Kruszewski committed
Resolve conflicts keeping our k8s code
1 parent 03e1901 commit 66dd9cb

23 files changed: +9 −2370 lines

.travis.yml

Lines changed: 0 additions & 5 deletions
@@ -55,14 +55,9 @@ notifications:
 # 5. Run maven build before running lints.
 install:
 - export MAVEN_SKIP_RC=1
-<<<<<<< HEAD
-- build/mvn ${PHASE} ${PROFILES} ${MODULES} ${ARGS}
-# 6. Run lints.
-=======
 - build/mvn -T 4 -q -DskipTests -Pkubernetes -Pmesos -Pyarn -Pkinesis-asl -Phive -Phive-thriftserver install

 # 6. Run lint-java.
->>>>>>> master
 script:
 - dev/lint-java
 - dev/lint-scala
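
Note: the resolution keeps the master side of this conflict, i.e. a single quiet, parallel (-T 4) Maven install with the kubernetes, mesos, yarn, kinesis-asl, hive, and hive-thriftserver profiles, in place of HEAD's parameterized build/mvn ${PHASE} ${PROFILES} ${MODULES} ${ARGS} invocation. Reconstructed from the context and retained lines above (indentation in the real file may differ), the section now reads:

# 5. Run maven build before running lints.
install:
- export MAVEN_SKIP_RC=1
- build/mvn -T 4 -q -DskipTests -Pkubernetes -Pmesos -Pyarn -Pkinesis-asl -Phive -Phive-thriftserver install

# 6. Run lint-java.
script:
- dev/lint-java
- dev/lint-scala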

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 6 additions & 30 deletions
@@ -18,13 +18,13 @@
 package org.apache.spark.deploy

 import java.io._
-import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
+import java.lang.reflect.{InvocationTargetException, UndeclaredThrowableException}
 import java.net.URL
 import java.security.PrivilegedExceptionAction
 import java.text.ParseException

 import scala.annotation.tailrec
-import scala.collection.mutable.{ArrayBuffer, HashMap, Map}
+import scala.collection.mutable.ArrayBuffer
 import scala.util.{Properties, Try}

 import org.apache.commons.lang3.StringUtils
@@ -77,11 +77,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
   private val MESOS = 4
   private val LOCAL = 8
   private val KUBERNETES = 16
-<<<<<<< HEAD
-  private val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | KUBERNETES | LOCAL
-=======
   private val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | LOCAL | KUBERNETES
->>>>>>> master

   // Deploy modes
   private val CLIENT = 1
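
Both branches of this conflict compute the same mask: each cluster manager is a distinct power-of-two flag, and bitwise OR is commutative, so the order of LOCAL and KUBERNETES is immaterial. A minimal sketch of the flag arithmetic, assuming YARN = 1 and STANDALONE = 2 (those definitions are elided from the hunk above):

object ClusterMgrFlags {
  // Flag bits as shown in the diff; YARN and STANDALONE are assumed values.
  val YARN = 1
  val STANDALONE = 2
  val MESOS = 4
  val LOCAL = 8
  val KUBERNETES = 16

  def main(args: Array[String]): Unit = {
    val headSide = YARN | STANDALONE | MESOS | KUBERNETES | LOCAL
    val masterSide = YARN | STANDALONE | MESOS | LOCAL | KUBERNETES
    assert(headSide == masterSide) // bitwise OR is commutative: both are 31
    println((KUBERNETES & masterSide) != 0) // membership test, as SparkSubmit uses the mask: true
  }
}

So the conflict was purely textual; either side yields ALL_CLUSTER_MGRS = 31.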
@@ -351,7 +347,6 @@ object SparkSubmit extends CommandLineUtils with Logging {
     val isMesosCluster = clusterManager == MESOS && deployMode == CLUSTER
     val isKubernetesCluster = clusterManager == KUBERNETES && deployMode == CLUSTER
     val isStandAloneCluster = clusterManager == STANDALONE && deployMode == CLUSTER
-    val isKubernetesCluster = clusterManager == KUBERNETES && deployMode == CLUSTER

     if (!isMesosCluster && !isStandAloneCluster) {
       // Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files

The deleted line was a duplicate isKubernetesCluster definition left behind by the merge; two vals of the same name in one scope would not compile.
@@ -586,16 +581,11 @@ object SparkSubmit extends CommandLineUtils with Logging {
       OptionAssigner(args.principal, YARN, ALL_DEPLOY_MODES, confKey = "spark.yarn.principal"),
       OptionAssigner(args.keytab, YARN, ALL_DEPLOY_MODES, confKey = "spark.yarn.keytab"),

-<<<<<<< HEAD
       OptionAssigner(args.kubernetesNamespace, KUBERNETES, ALL_DEPLOY_MODES,
         confKey = "spark.kubernetes.namespace"),

-      // Other options
-      OptionAssigner(args.executorCores, STANDALONE | YARN, ALL_DEPLOY_MODES,
-=======
       // Other options
       OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
->>>>>>> master
         confKey = "spark.executor.cores"),
       OptionAssigner(args.executorMemory, STANDALONE | MESOS | YARN | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = "spark.executor.memory"),
@@ -606,11 +596,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
       OptionAssigner(args.jars, LOCAL, CLIENT, confKey = "spark.jars"),
       OptionAssigner(args.jars, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = "spark.jars"),
-<<<<<<< HEAD
-      OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN, CLUSTER,
-=======
       OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
->>>>>>> master
         confKey = "spark.driver.memory"),
      OptionAssigner(args.driverCores, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
        confKey = "spark.driver.cores"),
@@ -748,32 +734,22 @@
     }

     if (isKubernetesCluster) {
-<<<<<<< HEAD
-      childMainClass = "org.apache.spark.deploy.k8s.submit.Client"
+      childMainClass = KUBERNETES_CLUSTER_SUBMIT_CLASS
       if (args.isPython) {
         childArgs ++= Array("--primary-py-file", args.primaryResource)
         childArgs ++= Array("--main-class", "org.apache.spark.deploy.PythonRunner")
         if (args.pyFiles != null) {
           childArgs ++= Array("--other-py-files", args.pyFiles)
         }
       } else {
-        childArgs ++= Array("--primary-java-resource", args.primaryResource)
+        if (args.primaryResource != SparkLauncher.NO_RESOURCE) {
+          childArgs ++= Array("--primary-java-resource", args.primaryResource)
+        }
         childArgs ++= Array("--main-class", args.mainClass)
       }
       args.childArgs.foreach { arg =>
         childArgs += "--arg"
         childArgs += arg
-=======
-      childMainClass = KUBERNETES_CLUSTER_SUBMIT_CLASS
-      if (args.primaryResource != SparkLauncher.NO_RESOURCE) {
-        childArgs ++= Array("--primary-java-resource", args.primaryResource)
-      }
-      childArgs ++= Array("--main-class", args.mainClass)
-      if (args.childArgs != null) {
-        args.childArgs.foreach { arg =>
-          childArgs += ("--arg", arg)
-        }
->>>>>>> master
       }
     }

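Reconstructed from the context and added lines, the resolved block keeps HEAD's Python handling while adopting master's KUBERNETES_CLUSTER_SUBMIT_CLASS constant and its NO_RESOURCE guard on the primary Java resource:

if (isKubernetesCluster) {
  childMainClass = KUBERNETES_CLUSTER_SUBMIT_CLASS
  if (args.isPython) {
    childArgs ++= Array("--primary-py-file", args.primaryResource)
    childArgs ++= Array("--main-class", "org.apache.spark.deploy.PythonRunner")
    if (args.pyFiles != null) {
      childArgs ++= Array("--other-py-files", args.pyFiles)
    }
  } else {
    if (args.primaryResource != SparkLauncher.NO_RESOURCE) {
      childArgs ++= Array("--primary-java-resource", args.primaryResource)
    }
    childArgs ++= Array("--main-class", args.mainClass)
  }
  args.childArgs.foreach { arg =>
    childArgs += "--arg"
    childArgs += arg
  }
}

Note that master's null guard around args.childArgs is not carried over; the resolution keeps HEAD's unguarded foreach.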

core/src/main/scala/org/apache/spark/internal/config/package.scala

Lines changed: 0 additions & 8 deletions
@@ -240,7 +240,6 @@ package object config {
     .stringConf
     .createOptional

-<<<<<<< HEAD
   private[spark] val CONDA_BOOTSTRAP_PACKAGES = ConfigBuilder("spark.conda.bootstrapPackages")
     .doc("The packages that will be added to the conda environment. "
       + "Only relevant when main class is CondaRunner.")
@@ -272,13 +271,6 @@ package object config {
     .toSequence
     .createWithDefault(Nil)

-  // To limit memory usage, we only track information for a fixed number of tasks
-  private[spark] val UI_RETAINED_TASKS = ConfigBuilder("spark.ui.retainedTasks")
-    .intConf
-    .createWithDefault(100000)
-
-=======
->>>>>>> master
   // To limit how many applications are shown in the History Server summary ui
   private[spark] val HISTORY_UI_MAX_APPS =
     ConfigBuilder("spark.history.ui.maxApplications").intConf.createWithDefault(Integer.MAX_VALUE)

dev/deps/spark-deps-hadoop-2.6

Lines changed: 0 additions & 190 deletions
This file was deleted.
