
Commit 218b8c1

Merge pull request apache#34 from skonto/extended_suite
Add lightbend custom suite
2 parents f825847 + 44c86b2 commit 218b8c1


15 files changed (+664, −7 lines)


resource-managers/kubernetes/docker/src/main/dockerfiles/spark/entrypoint.sh

Lines changed: 8 additions & 0 deletions
@@ -27,6 +27,14 @@ set +e
 uidentry=$(getent passwd $myuid)
 set -e
 
+if [ -n "$HADOOP_CONFIG_URL" ]; then
+  echo "Setting up hadoop config files...."
+  mkdir -p /etc/hadoop/conf
+  wget $HADOOP_CONFIG_URL/core-site.xml -P /etc/hadoop/conf
+  wget $HADOOP_CONFIG_URL/hdfs-site.xml -P /etc/hadoop/conf
+  export HADOOP_CONF_DIR=/etc/hadoop/conf
+fi
+
 # If there is no passwd entry for the container UID, attempt to create one
 if [ -z "$uidentry" ] ; then
   if [ -w /etc/passwd ] ; then
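With this change, any image built from this entrypoint can point at an HTTP endpoint serving core-site.xml and hdfs-site.xml and have HADOOP_CONF_DIR wired up before the Spark process starts. A minimal sketch of passing the variable to the driver and executors through spark-submit; the config-server URL, API server host, and application jar are hypothetical:

    # Hypothetical endpoint serving core-site.xml and hdfs-site.xml
    HADOOP_CONF_URL=http://config-server.example.com/hadoop-conf

    bin/spark-submit \
      --master k8s://https://<api-server-host>:6443 \
      --deploy-mode cluster \
      --conf spark.kubernetes.driverEnv.HADOOP_CONFIG_URL=$HADOOP_CONF_URL \
      --conf spark.executorEnv.HADOOP_CONFIG_URL=$HADOOP_CONF_URL \
      --class org.apache.spark.examples.SparkPi \
      local:///opt/spark/examples/jars/spark-examples.jar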

resource-managers/kubernetes/integration-tests/dev/dev-run-integration-tests.sh

Lines changed: 30 additions & 0 deletions
@@ -27,6 +27,9 @@ IMAGE_TAG="N/A"
 SPARK_MASTER=
 NAMESPACE=
 SERVICE_ACCOUNT=
+EXTRA_JARS=
+INCLUDE_TAGS=
+EXCLUDE_TAGS=
 
 # Parse arguments
 while (( "$#" )); do
@@ -59,6 +62,18 @@ while (( "$#" )); do
       SERVICE_ACCOUNT="$2"
       shift
       ;;
+    --extra-jars)
+      EXTRA_JARS="$2"
+      shift
+      ;;
+    --include-tags)
+      INCLUDE_TAGS="$2"
+      shift
+      ;;
+    --exclude-tags)
+      EXCLUDE_TAGS="$2"
+      shift
+      ;;
     *)
       break
       ;;
@@ -90,4 +105,19 @@ then
   properties=( ${properties[@]} -Dspark.kubernetes.test.master=$SPARK_MASTER )
 fi
 
+if [ -n $EXTRA_JARS ];
+then
+  properties=( ${properties[@]} -Dspark.kubernetes.test.extraJars=$EXTRA_JARS )
+fi
+
+if [ -n $EXCLUDE_TAGS ];
+then
+  properties=( ${properties[@]} -Dtest.exclude.tags=$EXCLUDE_TAGS )
+fi
+
+if [ -n $INCLUDE_TAGS ];
+then
+  properties=( ${properties[@]} -Dtest.include.tags=$INCLUDE_TAGS )
+fi
+
 ../../../build/mvn integration-test ${properties[@]}
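The new --extra-jars flag forwards additional jars into the test run as the spark.kubernetes.test.extraJars system property. An illustrative invocation (the jar path is a placeholder):

    ./dev/dev-run-integration-tests.sh \
      --extra-jars /path/to/extra-lib.jar

Note that `[ -n $EXTRA_JARS ]` with the variable unquoted expands to `[ -n ]` when the variable is empty, which evaluates to true, so the corresponding -D property is passed (with an empty value) even when the flag is not given; the same applies to the two tag checks.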

resource-managers/kubernetes/integration-tests/pom.xml

Lines changed: 15 additions & 1 deletion
@@ -40,6 +40,7 @@
     <spark.kubernetes.test.deployMode>minikube</spark.kubernetes.test.deployMode>
     <spark.kubernetes.test.imageRepo>docker.io/kubespark</spark.kubernetes.test.imageRepo>
     <test.exclude.tags></test.exclude.tags>
+    <test.include.tags></test.include.tags>
   </properties>
   <packaging>jar</packaging>
   <name>Spark Project Kubernetes Integration Tests</name>
@@ -102,7 +103,18 @@
         </execution>
       </executions>
     </plugin>
+
     <plugin>
+      <groupId>org.apache.maven.plugins</groupId>
+      <artifactId>maven-surefire-plugin</artifactId>
+      <version>2.7</version>
+      <configuration>
+        <skipTests>true</skipTests>
+      </configuration>
+    </plugin>
+    <plugin>
+
+
       <!-- Triggers scalatest plugin in the integration-test phase instead of
         the test phase. -->
       <groupId>org.scalatest</groupId>
@@ -112,11 +124,12 @@
         <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
         <junitxml>.</junitxml>
         <filereports>SparkTestSuite.txt</filereports>
-        <argLine>-ea -Xmx3g -XX:ReservedCodeCacheSize=512m ${extraScalaTestArgs}</argLine>
+        <argLine>-ea -Xmx3g -XX:ReservedCodeCacheSize=512m ${extraScalaTestArgs} </argLine>
         <stderr/>
         <systemProperties>
          <log4j.configuration>file:src/test/resources/log4j.properties</log4j.configuration>
          <java.awt.headless>true</java.awt.headless>
+         <spark.kubernetes.test.extraJars>${spark.kubernetes.test.extraJars}</spark.kubernetes.test.extraJars>
          <spark.kubernetes.test.imageTagFile>${spark.kubernetes.test.imageTagFile}</spark.kubernetes.test.imageTagFile>
          <spark.kubernetes.test.unpackSparkDir>${spark.kubernetes.test.unpackSparkDir}</spark.kubernetes.test.unpackSparkDir>
          <spark.kubernetes.test.imageRepo>${spark.kubernetes.test.imageRepo}</spark.kubernetes.test.imageRepo>
@@ -126,6 +139,7 @@
          <spark.kubernetes.test.serviceAccountName>${spark.kubernetes.test.serviceAccountName}</spark.kubernetes.test.serviceAccountName>
         </systemProperties>
         <tagsToExclude>${test.exclude.tags}</tagsToExclude>
+        <tagsToInclude>${test.include.tags}</tagsToInclude>
      </configuration>
      <executions>
        <execution>
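The same knobs are reachable without the dev script, since the scalatest plugin reads tagsToInclude/tagsToExclude and the listed system properties from these Maven properties. An illustrative direct invocation, run from the integration-tests module directory as the dev script does (jar path is a placeholder):

    ../../../build/mvn integration-test \
      -Dtest.exclude.tags=noDcos \
      -Dspark.kubernetes.test.extraJars=/path/to/extra-lib.jar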

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala

Lines changed: 4 additions & 1 deletion
@@ -26,13 +26,16 @@ import scala.collection.JavaConverters._
 import com.google.common.io.PatternFilenameFilter
 import io.fabric8.kubernetes.api.model.{Container, Pod}
 import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
+import org.scalatest.Tag
 import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
 import org.scalatest.time.{Minutes, Seconds, Span}
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.k8s.integrationtest.backend.{IntegrationTestBackend, IntegrationTestBackendFactory}
 import org.apache.spark.deploy.k8s.integrationtest.config._
 
+object NoDCOS extends Tag("noDcos")
+
 private[spark] class KubernetesSuite extends SparkFunSuite
   with BeforeAndAfterAll with BeforeAndAfter {
 
@@ -109,7 +112,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite
     runSparkPiAndVerifyCompletion()
   }
 
-  test("Run SparkPi with a master URL without a scheme.") {
+  test("Run SparkPi with a master URL without a scheme.", NoDCOS) {
     val url = kubernetesTestComponents.kubernetesClient.getMasterUrl
     val k8sMasterUrl = if (url.getPort < 0) {
      s"k8s://${url.getHost}"

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala

Lines changed: 1 addition & 4 deletions
@@ -105,16 +105,13 @@ private[spark] object SparkAppLauncher extends Logging {
       sparkHomeDir: Path): Unit = {
     val sparkSubmitExecutable = sparkHomeDir.resolve(Paths.get("bin", "spark-submit"))
     logInfo(s"Launching a spark app with arguments $appArguments and conf $appConf")
-    val appArgsArray =
-      if (appArguments.appArgs.length > 0) Array(appArguments.appArgs.mkString(" "))
-      else Array[String]()
     val commandLine = (Array(sparkSubmitExecutable.toFile.getAbsolutePath,
       "--deploy-mode", "cluster",
       "--class", appArguments.mainClass,
       "--master", appConf.get("spark.master")
     ) ++ appConf.toStringArray :+
       appArguments.mainAppResource) ++
-      appArgsArray
+      appArguments.appArgs
     ProcessUtils.executeProcess(commandLine, timeoutSecs)
   }
 }
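The practical effect is that application arguments now reach spark-submit as separate argv entries instead of being collapsed into one space-joined string, so an argument that itself contains a space survives intact. A small bash sketch of the same distinction, using hypothetical arguments:

    # Hypothetical application arguments
    APP_ARGS=("10" "foo bar")

    # Before: arguments joined into a single argv element (like mkString(" "))
    printf '%q ' "${APP_ARGS[*]}"; echo   # prints: 10\ foo\ bar

    # After: each argument is its own argv element (like passing the array through)
    printf '%q ' "${APP_ARGS[@]}"; echo   # prints: 10 foo\ bar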
