Skip to content

Commit feecd5f

Browse files
susanxhuynh authored and Arthur Rand committed
Added "smoke" mark for smoke tests, to be run against development versions of DC/OS. Includes all tests except HDFS ones. (apache#238)
1 parent a52e08e commit feecd5f

File tree

2 files changed

+10
-0
lines changed

2 files changed

+10
-0
lines changed

tests/test_kafka.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -107,6 +107,7 @@ def setup_spark(kerberized_kafka, configure_security_spark, configure_universe):
107107

108108

109109
@pytest.mark.sanity
110+
@pytest.mark.smoke
110111
@pytest.mark.skipif(not utils.kafka_enabled(), reason='KAFKA_ENABLED is false')
111112
def test_spark_and_kafka():
112113
kerberos_flag = "true" if KERBERIZED_KAFKA else "false" # flag for using kerberized kafka given to app

tests/test_spark.py

Lines changed: 9 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -46,6 +46,7 @@ def setup_spark(configure_security, configure_universe):
4646

4747
@pytest.mark.xfail(utils.is_strict(), reason="Currently fails in strict mode")
4848
@pytest.mark.sanity
49+
@pytest.mark.smoke
4950
def test_jar(app_name=utils.SPARK_APP_NAME):
5051
master_url = ("https" if utils.is_strict() else "http") + "://leader.mesos:5050"
5152
spark_job_runner_args = '{} dcos \\"*\\" spark:only 2 --auth-token={}'.format(
@@ -60,6 +61,7 @@ def test_jar(app_name=utils.SPARK_APP_NAME):
6061

6162

6263
@pytest.mark.sanity
64+
@pytest.mark.smoke
6365
def test_rpc_auth():
6466
secret_name = "sparkauth"
6567

@@ -94,6 +96,7 @@ def test_sparkPi(app_name=utils.SPARK_APP_NAME):
9496

9597

9698
@pytest.mark.sanity
99+
@pytest.mark.smoke
97100
def test_python():
98101
python_script_path = os.path.join(THIS_DIR, 'jobs', 'python', 'pi_with_include.py')
99102
python_script_url = utils.upload_file(python_script_path)
@@ -106,6 +109,7 @@ def test_python():
106109

107110

108111
@pytest.mark.sanity
112+
@pytest.mark.smoke
109113
def test_r():
110114
r_script_path = os.path.join(THIS_DIR, 'jobs', 'R', 'dataframe.R')
111115
r_script_url = utils.upload_file(r_script_path)
@@ -125,6 +129,7 @@ def test_cni():
125129

126130
#@pytest.mark.skip("Enable when SPARK-21694 is merged and released in DC/OS Spark")
127131
@pytest.mark.sanity
132+
@pytest.mark.smoke
128133
def test_cni_labels():
129134
driver_task_id = utils.submit_job(app_url=utils.SPARK_EXAMPLES,
130135
app_args="3000", # Long enough to examine the Driver's & Executor's task infos
@@ -166,6 +171,7 @@ def _check_task_network_info(task):
166171

167172

168173
@pytest.mark.sanity
174+
@pytest.mark.smoke
169175
def test_s3():
170176
def make_credential_secret(envvar, secret_path):
171177
rc, stdout, stderr = sdk_cmd.run_raw_cli("security secrets create {p} -v {e}"
@@ -234,6 +240,7 @@ def make_credential_secret(envvar, secret_path):
234240
# Skip DC/OS < 1.10, because it doesn't have adminrouter support for service groups.
235241
@pytest.mark.skipif('shakedown.dcos_version_less_than("1.10")')
236242
@pytest.mark.sanity
243+
@pytest.mark.smoke
237244
def test_marathon_group():
238245
app_id = utils.FOLDERED_SPARK_APP_NAME
239246
options = {"service": {"name": app_id}}
@@ -243,6 +250,7 @@ def test_marathon_group():
243250
#shakedown.uninstall_package_and_wait(SPARK_PACKAGE_NAME, app_id)
244251

245252

253+
246254
@pytest.mark.sanity
247255
def test_cli_multiple_spaces():
248256
utils.run_tests(app_url=utils.SPARK_EXAMPLES,
@@ -256,6 +264,7 @@ def test_cli_multiple_spaces():
256264
@pytest.mark.skipif('shakedown.dcos_version_less_than("1.10")')
257265
@sdk_utils.dcos_ee_only
258266
@pytest.mark.sanity
267+
@pytest.mark.smoke
259268
def test_driver_executor_tls():
260269
'''
261270
Put keystore and truststore as secrets in DC/OS secret store.

0 commit comments

Comments (0)