
Commit f1edf88

Revert "add/remove pytest marks to TF tests"
This reverts commit b522f93.
1 parent (b522f93); commit f1edf88

18 files changed: +191 additions, -784 deletions

tank/tf/conftest.py

Lines changed: 0 additions & 3 deletions
This file was deleted.
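
The contents of the deleted conftest.py are not shown in this diff, but since the fixtures removed below call pytestconfig.getoption("save_temps"), the file almost certainly registered that command-line option. A minimal sketch of such a hook follows; the exact flag spelling, default, and help text are assumptions, not the recovered file:

def pytest_addoption(parser):
    # Hypothetical reconstruction of the deleted hook: it registers the
    # --save_temps flag that the removed autouse fixtures read via
    # pytestconfig.getoption("save_temps").
    parser.addoption("--save_temps", action="store_true", default=False,
                     help="save model inputs/outputs and compiler temp files")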

tank/tf/hf_masked_lm/albert-base-v2_test.py

Lines changed: 11 additions & 46 deletions
@@ -3,65 +3,30 @@
 from shark.iree_utils import check_device_drivers
 from shark.shark_inference import SharkInference

-import iree.compiler as ireec
 import unittest
 import pytest
-import numpy as np
-import tempfile


 class AlbertBaseModuleTester:
-
-    def __init__(
-        self,
-        save_temps=False
-    ):
-        self.save_temps = save_temps

     def create_and_check_module(self, dynamic, device):
         model, input, act_out = get_causal_lm_model("albert-base-v2")
-        save_temps = self.save_temps
-        if save_temps == True:
-            if dynamic == True:
-                repro_dir = f"albert_base_v2_dynamic_{device}"
-            else:
-                repro_dir = f"albert_base_v2_static_{device}"
-            temp_dir = tempfile.mkdtemp(prefix=repro_dir)
-            np.set_printoptions(threshold=np.inf)
-            np.save(f"{temp_dir}/input1.npy", input[0])
-            np.save(f"{temp_dir}/input2.npy", input[1])
-            exp_out = act_out.numpy()
-            with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
-                out_file.write(np.array2string(exp_out))
-            with ireec.tools.TempFileSaver(temp_dir):
-                shark_module = SharkInference(model, (input,),
-                                              device=device,
-                                              dynamic=dynamic,
-                                              jit_trace=True)
-                shark_module.set_frontend("tensorflow")
-                shark_module.compile()
-                results = shark_module.forward((input))
-                assert True == compare_tensors_tf(act_out, results)
-
-        else:
-            shark_module = SharkInference(model, (input,),
-                                          device=device,
-                                          dynamic=dynamic,
-                                          jit_trace=True)
-            shark_module.set_frontend("tensorflow")
-            shark_module.compile()
-            results = shark_module.forward((input))
-            assert True == compare_tensors_tf(act_out, results)
+        shark_module = SharkInference(model, (input,),
+                                      device=device,
+                                      dynamic=dynamic,
+                                      jit_trace=True)
+        shark_module.set_frontend("tensorflow")
+        shark_module.compile()
+        results = shark_module.forward((input))
+        assert True == compare_tensors_tf(act_out, results)


 class AlbertBaseModuleTest(unittest.TestCase):

-    @pytest.fixture(autouse=True)
-    def configure(self, pytestconfig):
-        self.module_tester = AlbertBaseModuleTester(self)
-        self.module_tester.save_temps = pytestconfig.getoption("save_temps")
+    def setUp(self):
+        self.module_tester = AlbertBaseModuleTester()

-    @pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
+    @pytest.mark.xfail
     def test_module_static_cpu(self):
         dynamic = False
         device = "cpu"
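
For context on the pattern this revert removes: an autouse pytest fixture can attach configuration to a unittest.TestCase instance, which is how save_temps reached each module tester before this commit. The following is a minimal, self-contained sketch with hypothetical class names; it assumes --save_temps has been registered in a conftest.py as sketched above (otherwise getoption raises ValueError):

import unittest

import pytest


class ExampleModuleTester:
    # Stand-in for AlbertBaseModuleTester and the other testers (hypothetical).
    def __init__(self, save_temps=False):
        self.save_temps = save_temps


class ExampleModuleTest(unittest.TestCase):

    @pytest.fixture(autouse=True)
    def configure(self, pytestconfig):
        # Runs around every test method even though this is a unittest.TestCase;
        # pytestconfig is pytest's built-in access to the parsed command line.
        self.module_tester = ExampleModuleTester()
        self.module_tester.save_temps = pytestconfig.getoption("save_temps")

    def test_configured(self):
        # --save_temps is a store_true flag, so the attribute is always a bool.
        self.assertIsInstance(self.module_tester.save_temps, bool)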

tank/tf/hf_masked_lm/bert-base-uncased_test.py

Lines changed: 11 additions & 46 deletions
@@ -3,65 +3,30 @@
 from shark.iree_utils import check_device_drivers
 from shark.shark_inference import SharkInference

-import iree.compiler as ireec
 import unittest
 import pytest
-import numpy as np
-import tempfile


 class BertBaseUncasedModuleTester:
-
-    def __init__(
-        self,
-        save_temps=False
-    ):
-        self.save_temps = save_temps

     def create_and_check_module(self, dynamic, device):
         model, input, act_out = get_causal_lm_model("bert-base-uncased")
-        save_temps = self.save_temps
-        if save_temps == True:
-            if dynamic == True:
-                repro_dir = f"bert_base_uncased_dynamic_{device}"
-            else:
-                repro_dir = f"bert_base_uncased_static_{device}"
-            temp_dir = tempfile.mkdtemp(prefix=repro_dir)
-            np.set_printoptions(threshold=np.inf)
-            np.save(f"{temp_dir}/input1.npy", input[0])
-            np.save(f"{temp_dir}/input2.npy", input[1])
-            exp_out = act_out.numpy()
-            with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
-                out_file.write(np.array2string(exp_out))
-            with ireec.tools.TempFileSaver(temp_dir):
-                shark_module = SharkInference(model, (input,),
-                                              device=device,
-                                              dynamic=dynamic,
-                                              jit_trace=True)
-                shark_module.set_frontend("tensorflow")
-                shark_module.compile()
-                results = shark_module.forward((input))
-                assert True == compare_tensors_tf(act_out, results)
-
-        else:
-            shark_module = SharkInference(model, (input,),
-                                          device=device,
-                                          dynamic=dynamic,
-                                          jit_trace=True)
-            shark_module.set_frontend("tensorflow")
-            shark_module.compile()
-            results = shark_module.forward((input))
-            assert True == compare_tensors_tf(act_out, results)
+        shark_module = SharkInference(model, (input,),
+                                      device=device,
+                                      dynamic=dynamic,
+                                      jit_trace=True)
+        shark_module.set_frontend("tensorflow")
+        shark_module.compile()
+        results = shark_module.forward((input))
+        assert True == compare_tensors_tf(act_out, results)


 class BertBaseUncasedModuleTest(unittest.TestCase):

-    @pytest.fixture(autouse=True)
-    def configure(self, pytestconfig):
-        self.module_tester = BertBaseUncasedModuleTester(self)
-        self.module_tester.save_temps = pytestconfig.getoption("save_temps")
+    def setUp(self):
+        self.module_tester = BertBaseUncasedModuleTester()

-    @pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
+    @pytest.mark.xfail
     def test_module_static_cpu(self):
         dynamic = False
         device = "cpu"

tank/tf/hf_masked_lm/camembert-base_test.py

Lines changed: 11 additions & 46 deletions
@@ -3,65 +3,30 @@
 from shark.iree_utils import check_device_drivers
 from shark.shark_inference import SharkInference

-import iree.compiler as ireec
 import unittest
 import pytest
-import numpy as np
-import tempfile


 class CamemBertModuleTester:
-
-    def __init__(
-        self,
-        save_temps=False
-    ):
-        self.save_temps = save_temps

     def create_and_check_module(self, dynamic, device):
         model, input, act_out = get_causal_lm_model("camembert-base")
-        save_temps = self.save_temps
-        if save_temps == True:
-            if dynamic == True:
-                repro_dir = f"camembert-base_dynamic_{device}"
-            else:
-                repro_dir = f"camembert-base_static_{device}"
-            temp_dir = tempfile.mkdtemp(prefix=repro_dir)
-            np.set_printoptions(threshold=np.inf)
-            np.save(f"{temp_dir}/input1.npy", input[0])
-            np.save(f"{temp_dir}/input2.npy", input[1])
-            exp_out = act_out.numpy()
-            with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
-                out_file.write(np.array2string(exp_out))
-            with ireec.tools.TempFileSaver(temp_dir):
-                shark_module = SharkInference(model, (input,),
-                                              device=device,
-                                              dynamic=dynamic,
-                                              jit_trace=True)
-                shark_module.set_frontend("tensorflow")
-                shark_module.compile()
-                results = shark_module.forward((input))
-                assert True == compare_tensors_tf(act_out, results)
-
-        else:
-            shark_module = SharkInference(model, (input,),
-                                          device=device,
-                                          dynamic=dynamic,
-                                          jit_trace=True)
-            shark_module.set_frontend("tensorflow")
-            shark_module.compile()
-            results = shark_module.forward((input))
-            assert True == compare_tensors_tf(act_out, results)
+        shark_module = SharkInference(model, (input,),
+                                      device=device,
+                                      dynamic=dynamic,
+                                      jit_trace=True)
+        shark_module.set_frontend("tensorflow")
+        shark_module.compile()
+        results = shark_module.forward((input))
+        assert True == compare_tensors_tf(act_out, results)


 class CamemBertModuleTest(unittest.TestCase):

-    @pytest.fixture(autouse=True)
-    def configure(self, pytestconfig):
-        self.module_tester = CamemBertModuleTester(self)
-        self.module_tester.save_temps = pytestconfig.getoption("save_temps")
+    def setUp(self):
+        self.module_tester = CamemBertModuleTester()

-    @pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
+    @pytest.mark.xfail
     def test_module_static_cpu(self):
         dynamic = False
         device = "cpu"

tank/tf/hf_masked_lm/convbert-base-turkish-cased_test.py

Lines changed: 11 additions & 46 deletions
@@ -3,66 +3,31 @@
 from shark.iree_utils import check_device_drivers
 from shark.shark_inference import SharkInference

-import iree.compiler as ireec
 import unittest
 import pytest
-import numpy as np
-import tempfile


 class ConvBertModuleTester:
-
-    def __init__(
-        self,
-        save_temps=False
-    ):
-        self.save_temps = save_temps

     def create_and_check_module(self, dynamic, device):
         model, input, act_out = get_causal_lm_model(
             "dbmdz/convbert-base-turkish-cased")
-        save_temps = self.save_temps
-        if save_temps == True:
-            if dynamic == True:
-                repro_dir = f"convbert_base_dynamic_{device}"
-            else:
-                repro_dir = f"convbert_base_static_{device}"
-            temp_dir = tempfile.mkdtemp(prefix=repro_dir)
-            np.set_printoptions(threshold=np.inf)
-            np.save(f"{temp_dir}/input1.npy", input[0])
-            np.save(f"{temp_dir}/input2.npy", input[1])
-            exp_out = act_out.numpy()
-            with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
-                out_file.write(np.array2string(exp_out))
-            with ireec.tools.TempFileSaver(temp_dir):
-                shark_module = SharkInference(model, (input,),
-                                              device=device,
-                                              dynamic=dynamic,
-                                              jit_trace=True)
-                shark_module.set_frontend("tensorflow")
-                shark_module.compile()
-                results = shark_module.forward((input))
-                assert True == compare_tensors_tf(act_out, results)
-
-        else:
-            shark_module = SharkInference(model, (input,),
-                                          device=device,
-                                          dynamic=dynamic,
-                                          jit_trace=True)
-            shark_module.set_frontend("tensorflow")
-            shark_module.compile()
-            results = shark_module.forward((input))
-            assert True == compare_tensors_tf(act_out, results)
+        shark_module = SharkInference(model, (input,),
+                                      device=device,
+                                      dynamic=dynamic,
+                                      jit_trace=True)
+        shark_module.set_frontend("tensorflow")
+        shark_module.compile()
+        results = shark_module.forward((input))
+        assert True == compare_tensors_tf(act_out, results)


 class ConvBertModuleTest(unittest.TestCase):

-    @pytest.fixture(autouse=True)
-    def configure(self, pytestconfig):
-        self.module_tester = ConvBertModuleTester(self)
-        self.module_tester.save_temps = pytestconfig.getoption("save_temps")
+    def setUp(self):
+        self.module_tester = ConvBertModuleTester()

-    @pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
+    @pytest.mark.xfail
     def test_module_static_cpu(self):
         dynamic = False
         device = "cpu"

tank/tf/hf_masked_lm/deberta-base_test.py

Lines changed: 11 additions & 46 deletions
@@ -3,63 +3,28 @@
 from shark.iree_utils import check_device_drivers
 from shark.shark_inference import SharkInference

-import iree.compiler as ireec
 import unittest
 import pytest
-import numpy as np
-import tempfile


 class DebertaModuleTester:
-
-    def __init__(
-        self,
-        save_temps=False
-    ):
-        self.save_temps = save_temps

     def create_and_check_module(self, dynamic, device):
         model, input, act_out = get_causal_lm_model("microsoft/deberta-base")
-        save_temps = self.save_temps
-        if save_temps == True:
-            if dynamic == True:
-                repro_dir = f"deberta-base_dynamic_{device}"
-            else:
-                repro_dir = f"deberta-base_static_{device}"
-            temp_dir = tempfile.mkdtemp(prefix=repro_dir)
-            np.set_printoptions(threshold=np.inf)
-            np.save(f"{temp_dir}/input1.npy", input[0])
-            np.save(f"{temp_dir}/input2.npy", input[1])
-            exp_out = act_out.numpy()
-            with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
-                out_file.write(np.array2string(exp_out))
-            with ireec.tools.TempFileSaver(temp_dir):
-                shark_module = SharkInference(model, (input,),
-                                              device=device,
-                                              dynamic=dynamic,
-                                              jit_trace=True)
-                shark_module.set_frontend("tensorflow")
-                shark_module.compile()
-                results = shark_module.forward((input))
-                assert True == compare_tensors_tf(act_out, results)
-
-        else:
-            shark_module = SharkInference(model, (input,),
-                                          device=device,
-                                          dynamic=dynamic,
-                                          jit_trace=True)
-            shark_module.set_frontend("tensorflow")
-            shark_module.compile()
-            results = shark_module.forward((input))
-            assert True == compare_tensors_tf(act_out, results)
+        shark_module = SharkInference(model, (input,),
+                                      device=device,
+                                      dynamic=dynamic,
+                                      jit_trace=True)
+        shark_module.set_frontend("tensorflow")
+        shark_module.compile()
+        results = shark_module.forward((input))
+        assert True == compare_tensors_tf(act_out, results)


 class DebertaModuleTest(unittest.TestCase):
-
-    @pytest.fixture(autouse=True)
-    def configure(self, pytestconfig):
-        self.module_tester = DebertaModuleTester(self)
-        self.module_tester.save_temps = pytestconfig.getoption("save_temps")
+
+    def setUp(self):
+        self.module_tester = DebertaModuleTester()

     @pytest.mark.xfail
     @pytest.mark.skip(reason="deberta currently failing in the lowering passes."
