diff --git a/docs/source/markdown/guides/how_to/models/post_processor.md b/docs/source/markdown/guides/how_to/models/post_processor.md
index df86001c83..d2b57aaa22 100644
--- a/docs/source/markdown/guides/how_to/models/post_processor.md
+++ b/docs/source/markdown/guides/how_to/models/post_processor.md
@@ -144,7 +144,7 @@ One key advantage of Anomalib's post-processor design is that it becomes part of
 ```python
 from anomalib.models import Patchcore
 from anomalib.post_processing import PostProcessor
-from openvino.runtime import Core
+from openvino import Core
 import numpy as np
 
 # Training: Post-processor is part of the model
diff --git a/pyproject.toml b/pyproject.toml
index bce9bc0516..8ef17559ab 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -52,10 +52,7 @@ core = [
     "lightning>=2.2",
     "torch>=2",
     "torchmetrics>=1.3.2",
-    # NOTE: open-clip-torch throws the following error on v2.26.1
-    # torch.onnx.errors.UnsupportedOperatorError: Exporting the operator
-    # 'aten::_native_multi_head_attention' to ONNX opset version 14 is not supported
-    "open-clip-torch>=2.23.0,<2.26.1",
+    "open-clip-torch>=2.32.0",
     "fvcore",
 ]
 openvino = ["openvino>=2024.0", "nncf>=2.10.0", "onnx>=1.16.0"]
diff --git a/src/anomalib/deploy/inferencers/openvino_inferencer.py b/src/anomalib/deploy/inferencers/openvino_inferencer.py
index 1327efeb1e..dd4292dc45 100644
--- a/src/anomalib/deploy/inferencers/openvino_inferencer.py
+++ b/src/anomalib/deploy/inferencers/openvino_inferencer.py
@@ -56,7 +56,7 @@
 import numpy as np
 import torch
 from lightning_utilities.core.imports import module_available
-from openvino.runtime.utils.data_helpers.wrappers import OVDict
+from openvino.utils.data_helpers.wrappers import OVDict
 from PIL.Image import Image as PILImage
 
 from anomalib.data import NumpyImageBatch
diff --git a/src/anomalib/models/components/base/export_mixin.py b/src/anomalib/models/components/base/export_mixin.py
index c8f807b469..310721d884 100644
--- a/src/anomalib/models/components/base/export_mixin.py
+++ b/src/anomalib/models/components/base/export_mixin.py
@@ -159,6 +159,7 @@ def to_onnx(
             dynamic_axes=dynamic_axes,
             input_names=["input"],
             output_names=output_names,
+            dynamo=True,
         )
 
         return onnx_path