Commit

Patch release 0.5.1 (#1101)
* Set init_state_dict

* pass openvino metadata as config

* DictConfig -> dict

* Update changelog

* Update date

* metadata_path -> metadata

---------

Co-authored-by: Ashwin Vaidya <ashwinitinvaidya@gmail.com>
ashwinvaidya17 and Ashwin Vaidya committed May 24, 2023
1 parent 5fa148f commit 2a546fb
Showing 9 changed files with 34 additions and 20 deletions.
6 changes: 5 additions & 1 deletion CHANGELOG.md
@@ -4,16 +4,20 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## [Unreleased]
## [v0.5.1] - 2023-05-24

### Added

### Changed

- Rename `metadata_path` to `metadata` in `OpenvinoInferencer` in https://github.com/openvinotoolkit/anomalib/pull/1101

### Deprecated

### Fixed

- Fix `init_state_dict` bug in `wrap_nncf_model` in https://github.com/openvinotoolkit/anomalib/pull/1101

## [v0.5.0] - 2023-05-09

### Added
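For illustration, the rename documented in the changelog above only changes the keyword used when constructing the inferencer. A minimal before/after sketch (paths and import location assumed, not part of this diff):

```python
from pathlib import Path

from anomalib.deploy import OpenVINOInferencer  # import location assumed for anomalib 0.5.x

model_path = Path("results/weights/openvino/model.bin")         # hypothetical export location
metadata_file = Path("results/weights/openvino/metadata.json")  # hypothetical metadata file

# Up to v0.5.0, the keyword argument was called `metadata_path`:
# inferencer = OpenVINOInferencer(path=model_path, metadata_path=metadata_file)

# From v0.5.1 onwards, the keyword is `metadata`:
inferencer = OpenVINOInferencer(path=model_path, metadata=metadata_file, device="CPU")
```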
16 changes: 12 additions & 4 deletions notebooks/000_getting_started/001_getting_started.ipynb
@@ -1,6 +1,7 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
@@ -112,6 +113,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
@@ -166,6 +168,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
@@ -187,6 +190,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
@@ -204,6 +208,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
@@ -237,6 +242,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
@@ -261,6 +267,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
@@ -322,6 +329,7 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
@@ -434,8 +442,8 @@
],
"source": [
"openvino_model_path = output_path / \"weights\" / \"openvino\" / \"model.bin\"\n",
"metadata_path = output_path / \"weights\" / \"openvino\" / \"metadata.json\"\n",
"print(openvino_model_path.exists(), metadata_path.exists())"
"metadata = output_path / \"weights\" / \"openvino\" / \"metadata.json\"\n",
"print(openvino_model_path.exists(), metadata.exists())"
]
},
{
@@ -446,7 +454,7 @@
"source": [
"inferencer = OpenVINOInferencer(\n",
" path=openvino_model_path, # Path to the OpenVINO IR model.\n",
" metadata_path=metadata_path, # Path to the metadata file.\n",
" metadata=metadata, # Path to the metadata file.\n",
" device=\"CPU\", # We would like to run it on an Intel CPU.\n",
")"
]
@@ -699,7 +707,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.13"
"version": "3.10.11"
},
"orig_nbformat": 4,
"vscode": {
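Pieced together, the updated notebook cells amount to roughly the following flow; the trailing `predict` call is assumed from the unchanged part of the notebook and is not shown in this diff:

```python
from pathlib import Path

from anomalib.deploy import OpenVINOInferencer  # import as used in the notebook

output_path = Path("results/padim/mvtec/bottle/run")  # hypothetical training output directory

openvino_model_path = output_path / "weights" / "openvino" / "model.bin"
metadata = output_path / "weights" / "openvino" / "metadata.json"
print(openvino_model_path.exists(), metadata.exists())  # sanity-check the exported artifacts

inferencer = OpenVINOInferencer(
    path=openvino_model_path,  # Path to the OpenVINO IR model.
    metadata=metadata,         # Path to the metadata file (renamed keyword).
    device="CPU",              # Run the model on an Intel CPU.
)

# Assumed base-class API, kept commented out here:
# predictions = inferencer.predict(image="path/to/test_image.png")
```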
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -73,7 +73,7 @@ show_error_codes = true


[[tool.mypy.overrides]]
module = "torch.*"
module = ["torch.*", "wandb.*"]
follow_imports = "skip"
follow_imports_for_stubs = true

2 changes: 1 addition & 1 deletion src/anomalib/__init__.py
@@ -3,4 +3,4 @@
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

__version__ = "1.0.0dev"
__version__ = "0.5.1"
11 changes: 5 additions & 6 deletions src/anomalib/deploy/inferencers/openvino_inferencer.py
@@ -19,31 +19,30 @@
from .base_inferencer import Inferencer

if find_spec("openvino") is not None:
from openvino.inference_engine import ( # type: ignore # pylint: disable=no-name-in-module
IECore,
)
from openvino.inference_engine import IECore # type: ignore # pylint: disable=no-name-in-module


class OpenVINOInferencer(Inferencer):
"""OpenVINO implementation for the inference.
Args:
path (str | Path): Path to the openvino onnx, xml or bin file.
metadata_path (str | Path, optional): Path to metadata file. Defaults to None.
metadata (str | Path | dict, optional): Path to metadata file or a dict object defining the
metadata. Defaults to None.
device (str | None, optional): Device to run the inference on. Defaults to "CPU".
task (TaskType | None, optional): Task type. Defaults to None.
"""

def __init__(
self,
path: str | Path | tuple[bytes, bytes],
metadata_path: str | Path | None = None,
metadata: str | Path | dict | None = None,
device: str | None = "CPU",
task: str | None = None,
) -> None:
self.device = device
self.input_blob, self.output_blob, self.network = self.load_model(path)
self.metadata = super()._load_metadata(metadata_path)
self.metadata = metadata if isinstance(metadata, dict) else super()._load_metadata(metadata)
self.task = TaskType(task) if task else TaskType(self.metadata["task"])

def load_model(self, path: str | Path | tuple[bytes, bytes]):
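With this change the constructor accepts the metadata either as a path to `metadata.json` or as an already-loaded dictionary (the `DictConfig -> dict` item in the commit message). A short sketch of both call styles, with hypothetical paths:

```python
import json
from pathlib import Path

from anomalib.deploy import OpenVINOInferencer  # import location assumed

model_path = "weights/openvino/model.xml"               # hypothetical OpenVINO IR model
metadata_file = Path("weights/openvino/metadata.json")  # hypothetical metadata file

# Option 1: pass the path and let the inferencer load the file itself.
inferencer_from_path = OpenVINOInferencer(path=model_path, metadata=metadata_file)

# Option 2: load (or build) the metadata dict yourself and pass it directly.
metadata_dict = json.loads(metadata_file.read_text())
inferencer_from_dict = OpenVINOInferencer(path=model_path, metadata=metadata_dict)

# When `task` is not given, it is read from the metadata's "task" key,
# as in the updated __init__ above.
```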
5 changes: 4 additions & 1 deletion src/anomalib/utils/callbacks/nncf/callback.py
@@ -52,7 +52,10 @@ def setup(self, trainer: pl.Trainer, pl_module: pl.LightningModule, stage: str |
config = register_default_init_args(self.config, init_loader)

self.nncf_ctrl, pl_module.model = wrap_nncf_model(
model=pl_module.model, config=config, dataloader=trainer.datamodule.train_dataloader() # type: ignore
model=pl_module.model,
config=config,
dataloader=trainer.datamodule.train_dataloader(), # type: ignore
init_state_dict=None, # type: ignore
)

def on_train_batch_start(
2 changes: 1 addition & 1 deletion src/anomalib/utils/sweep/helpers/inference.py
@@ -61,7 +61,7 @@ def get_openvino_throughput(model_path: str | Path, test_dataset: Dataset) -> fl

inferencer = OpenVINOInferencer(
path=model_path / "weights" / "openvino" / "model.xml",
metadata_path=model_path / "weights" / "openvino" / "metadata.json",
metadata=model_path / "weights" / "openvino" / "metadata.json",
)
start_time = time.time()
for image_path in test_dataset.samples.image_path:
8 changes: 4 additions & 4 deletions tools/inference/gradio_inference.py
@@ -40,12 +40,12 @@ def get_parser() -> ArgumentParser:
return parser


def get_inferencer(weight_path: Path, metadata_path: Path | None = None) -> Inferencer:
def get_inferencer(weight_path: Path, metadata: Path | None = None) -> Inferencer:
"""Parse args and open inferencer.
Args:
weight_path (Path): Path to model weights.
metadata_path (Path | None, optional): Metadata is required for OpenVINO models. Defaults to None.
metadata (Path | None, optional): Metadata is required for OpenVINO models. Defaults to None.
Raises:
ValueError: If unsupported model weight is passed.
@@ -64,11 +64,11 @@ def get_inferencer(weight_path: Path, metadata_path: Path | None = None) -> Infe
inferencer = torch_inferencer(path=weight_path)

elif extension in (".onnx", ".bin", ".xml"):
if metadata_path is None:
if metadata is None:
raise ValueError("When using OpenVINO Inferencer, the following arguments are required: --metadata")

openvino_inferencer = getattr(module, "OpenVINOInferencer")
inferencer = openvino_inferencer(path=weight_path, metadata_path=metadata_path)
inferencer = openvino_inferencer(path=weight_path, metadata=metadata)

else:
raise ValueError(
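A small usage sketch of the renamed helper; the import assumes the script's directory is importable (e.g. the repository root on PYTHONPATH), and the paths are hypothetical:

```python
from pathlib import Path

from tools.inference.gradio_inference import get_inferencer  # assumed import path

inferencer = get_inferencer(
    weight_path=Path("weights/openvino/model.xml"),   # OpenVINO weights
    metadata=Path("weights/openvino/metadata.json"),  # required for OpenVINO models
)
```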
2 changes: 1 addition & 1 deletion tools/inference/openvino_inference.py
@@ -71,7 +71,7 @@ def infer(args: Namespace) -> None:
args (Namespace): The arguments from the command line.
"""
# Get the inferencer.
inferencer = OpenVINOInferencer(path=args.weights, metadata_path=args.metadata, device=args.device)
inferencer = OpenVINOInferencer(path=args.weights, metadata=args.metadata, device=args.device)
visualizer = Visualizer(mode=args.visualization_mode, task=args.task)

filenames = get_image_filenames(path=args.input)
