Commit 29788e1

Revert "export model file name v2"
This reverts commit 37c8306.
1 parent 37c8306 commit 29788e1

2 files changed: +4 additions, -16 deletions

src/anomalib/engine/engine.py (0 additions, 6 deletions)

@@ -728,7 +728,6 @@ def export(
         model: AnomalibModule,
         export_type: ExportType | str,
         export_root: str | Path | None = None,
-        model_file_name: str = "model",
         input_size: tuple[int, int] | None = None,
         compression_type: CompressionType | None = None,
         datamodule: AnomalibDataModule | None = None,
@@ -743,8 +742,6 @@ def export(
             export_type (ExportType): Export type.
             export_root (str | Path | None, optional): Path to the output directory. If it is not set, the model is
                 exported to trainer.default_root_dir. Defaults to None.
-            model_file_name (str = "model"): Name of the exported model file. If it is not set, the model is
-                is called "model". Defaults to "model".
             input_size (tuple[int, int] | None, optional): A statis input shape for the model, which is exported to ONNX
                 and OpenVINO format. Defaults to None.
             compression_type (CompressionType | None, optional): Compression type for OpenVINO exporting only.
@@ -801,18 +798,15 @@ def export(
         if export_type == ExportType.TORCH:
             exported_model_path = model.to_torch(
                 export_root=export_root,
-                model_file_name=model_file_name,
             )
         elif export_type == ExportType.ONNX:
             exported_model_path = model.to_onnx(
                 export_root=export_root,
-                model_file_name=model_file_name,
                 input_size=input_size,
             )
         elif export_type == ExportType.OPENVINO:
             exported_model_path = model.to_openvino(
                 export_root=export_root,
-                model_file_name=model_file_name,
                 input_size=input_size,
                 compression_type=compression_type,
                 datamodule=datamodule,

src/anomalib/models/components/base/export_mixin.py (4 additions, 10 deletions)

@@ -80,13 +80,11 @@ class ExportMixin:
     def to_torch(
         self,
         export_root: Path | str,
-        model_file_name: str,
     ) -> Path:
         """Export model to PyTorch format.

         Args:
             export_root (Path | str): Path to the output folder
-            model_file_name (str): Name of the exported model

         Returns:
             Path: Path to the exported PyTorch model (.pt file)
@@ -101,7 +99,7 @@ def to_torch(
             PosixPath('./exports/weights/torch/model.pt')
         """
         export_root = _create_export_root(export_root, ExportType.TORCH)
-        pt_model_path = export_root / (model_file_name + ".pt")
+        pt_model_path = export_root / "model.pt"
         torch.save(
             obj={"model": self},
             f=pt_model_path,
@@ -111,14 +109,12 @@ def to_torch(
     def to_onnx(
         self,
         export_root: Path | str,
-        model_file_name: str,
         input_size: tuple[int, int] | None = None,
     ) -> Path:
         """Export model to ONNX format.

         Args:
             export_root (Path | str): Path to the output folder
-            model_file_name (str): Name of the exported model.
             input_size (tuple[int, int] | None): Input image dimensions (height, width).
                 If ``None``, uses dynamic input shape. Defaults to ``None``

@@ -147,7 +143,7 @@ def to_onnx(
             if input_size
             else {"input": {0: "batch_size", 2: "height", 3: "weight"}, "output": {0: "batch_size"}}
         )
-        onnx_path = export_root / (model_file_name + ".onnx")
+        onnx_path = export_root / "model.onnx"
         # apply pass through the model to get the output names
         assert isinstance(self, LightningModule)  # mypy
         output_names = [name for name, value in self.eval()(input_shape)._asdict().items() if value is not None]
@@ -166,7 +162,6 @@ def to_onnx(
     def to_openvino(
         self,
         export_root: Path | str,
-        model_file_name: str,
         input_size: tuple[int, int] | None = None,
         compression_type: CompressionType | None = None,
         datamodule: AnomalibDataModule | None = None,
@@ -178,7 +173,6 @@ def to_openvino(

         Args:
             export_root (Path | str): Path to the output folder
-            model_file_name (str): Name of the exported model
             input_size (tuple[int, int] | None): Input image dimensions (height, width).
                 If ``None``, uses dynamic input shape. Defaults to ``None``
             compression_type (CompressionType | None): Type of compression to apply.
@@ -224,9 +218,9 @@ def to_openvino(
         import openvino as ov

         with TemporaryDirectory() as onnx_directory:
-            model_path = self.to_onnx(onnx_directory, model_file_name, input_size)
+            model_path = self.to_onnx(onnx_directory, input_size)
             export_root = _create_export_root(export_root, ExportType.OPENVINO)
-            ov_model_path = export_root / (model_file_name + ".xml")
+            ov_model_path = export_root / "model.xml"
             ov_args = {} if ov_args is None else ov_args

             model = ov.convert_model(model_path, **ov_args)
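At the mixin level, to_torch, to_onnx, and to_openvino likewise drop model_file_name, so the exported file names are hard-coded to model.pt, model.onnx, and model.xml under the export root. Below is a small sketch of calling the exporters directly under the post-revert signatures; the model and output directory are illustrative assumptions, and in practice these would be called on a trained or checkpoint-restored model.

    from anomalib.models import Padim

    model = Padim()  # placeholder; normally a trained model or one restored from a checkpoint

    # File names are fixed after the revert: model.pt and model.onnx under the export root.
    pt_path = model.to_torch(export_root="./exports")
    onnx_path = model.to_onnx(export_root="./exports", input_size=(256, 256))

    print(pt_path)    # e.g. ./exports/weights/torch/model.pt
    print(onnx_path)  # e.g. ./exports/weights/onnx/model.onnx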
