Layout Support (#13)
kozlov721 authored Jul 25, 2024
1 parent 6e157e7 commit 895499f
Showing 20 changed files with 578 additions and 226 deletions.
2 changes: 2 additions & 0 deletions README.md
@@ -73,11 +73,13 @@ Requires `hailo_ai_sw_suite_2024-04:1` docker image to be present on the system.
 ```bash
 docker build -f docker/<package>/Dockerfile.public -t luxonis/modelconverter-<package>:latest .
 ```
+
+1. For easier use, you can install the ModelConverter CLI from PyPI using the following command:
 
 ```bash
 pip install modelconv
 ```
 
 For usage instructions, see `modelconverter --help`.
 
 ### GPU Support
14 changes: 9 additions & 5 deletions modelconverter/__main__.py
@@ -221,7 +221,7 @@ def infer(
         ),
     ],
     path: PathOption,
-    output_dir: OutputDirOption = None,
+    output_dir: OutputDirOption,
     stage: Annotated[
         Optional[str],
         typer.Option(
@@ -251,11 +251,10 @@ def infer(
     try:
         mult_cfg, _, _ = get_configs(path, opts)
         cfg = mult_cfg.get_stage_config(stage)
-        output_path = get_output_dir_name(
-            target, mult_cfg.name, output_dir
-        )
         Inferer = get_inferer(target)
-        Inferer.from_config(model_path, input_path, output_path, cfg).run()
+        Inferer.from_config(
+            model_path, input_path, Path(output_dir), cfg
+        ).run()
     except Exception:
         logger.exception("Encountered an unexpected error!")
         exit(2)
@@ -430,6 +429,8 @@ def convert(
     if not isinstance(out_models, list):
         out_models = [out_models]
     if to == Format.NN_ARCHIVE:
+        from modelconverter.packages.base_exporter import Exporter
+
         logger.info("Converting to NN archive")
         assert main_stage is not None
         if len(out_models) > 1:
@@ -442,6 +443,9 @@
             archive_cfg,
             preprocessing,
             main_stage,
+            exporter.inference_model_path
+            if isinstance(exporter, Exporter)
+            else exporter.exporters[main_stage].inference_model_path,
         )
         generator = ArchiveGenerator(
             archive_name=f"{cfg.name}.{target.value.lower()}",
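The NN-archive branch has to handle both single-stage and multi-stage conversions, which is why the `Exporter` import and the `isinstance` dispatch above were added. A minimal sketch of that selection logic, assuming a multi-stage exporter whose `exporters` dict maps stage names to `Exporter` instances (class names other than `Exporter` are hypothetical):

```python
from pathlib import Path
from typing import Dict, Union


class Exporter:
    # Set by export(); points at the model actually used for inference.
    inference_model_path: Path


class MultiStageExporter:
    # Hypothetical container holding one Exporter per stage.
    exporters: Dict[str, Exporter]


def pick_inference_model(
    exporter: Union[Exporter, MultiStageExporter], main_stage: str
) -> Path:
    # Single-stage exporters carry the path directly; multi-stage
    # exporters are indexed by the main stage first.
    if isinstance(exporter, Exporter):
        return exporter.inference_model_path
    return exporter.exporters[main_stage].inference_model_path
```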
7 changes: 3 additions & 4 deletions modelconverter/packages/base_exporter.py
@@ -4,7 +4,7 @@
 from importlib.metadata import version
 from logging import getLogger
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Union, cast
+from typing import Any, Dict, List, Optional, Union
 
 import numpy as np
 import onnx
@@ -85,18 +85,17 @@ def __init__(
                 logger.warning(
                     f"Random calibration is being used for input '{name}'."
                 )
-                shape = cast(List[int], inp.shape)
                 dest = self.intermediate_outputs_dir / "random" / name
                 dest.mkdir(parents=True)
-                if shape is None or not all(isinstance(dim, int) for dim in shape):
+                if inp.shape is None:
                     exit_with(
                         ValueError(
                             f"Random calibration requires shape to be specified for input '{name}'."
                         )
                     )
 
                 for i in range(calib.max_images):
-                    arr = np.random.normal(calib.mean, calib.std, shape)
+                    arr = np.random.normal(calib.mean, calib.std, inp.shape)
                     arr = np.clip(arr, calib.min_value, calib.max_value)
 
                     arr = arr.astype(calib.data_type.as_numpy_dtype())
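The random-calibration path now reads `inp.shape` directly instead of a locally cast copy, and fails early when the shape is unspecified. As a standalone illustration, a sketch of generating clipped-normal calibration samples (the signature and default values are assumptions, not the project's API):

```python
from pathlib import Path
from typing import List

import numpy as np


def generate_random_calibration(
    dest: Path,
    shape: List[int],
    mean: float = 127.0,
    std: float = 35.0,
    min_value: float = 0.0,
    max_value: float = 255.0,
    max_images: int = 20,
) -> None:
    """Write `max_images` random .npy tensors of the given shape to `dest`."""
    if shape is None:
        raise ValueError("Random calibration requires a fully specified shape.")
    dest.mkdir(parents=True, exist_ok=True)
    for i in range(max_images):
        # Sample from N(mean, std), then clip into the valid value range.
        arr = np.random.normal(mean, std, shape)
        arr = np.clip(arr, min_value, max_value).astype(np.float32)
        np.save(dest / f"{i}.npy", arr)
```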
13 changes: 11 additions & 2 deletions modelconverter/packages/base_inferer.py
@@ -42,13 +42,22 @@ def __post_init__(self):
     def from_config(
         cls, model_path: str, src: Path, dest: Path, config: SingleStageConfig
     ):
+        for container, typ_name in zip(
+            [config.inputs, config.outputs], ["input", "output"]
+        ):
+            for node in container:
+                if node.shape is None:
+                    raise ValueError(
+                        f"Shape for {typ_name} '{node.name}' must be provided."
+                    )
+
         return cls(
             model_path=resolve_path(model_path, Path.cwd()),
             src=src,
             dest=dest,
-            in_shapes={inp.name: inp.shape for inp in config.inputs},
+            in_shapes={inp.name: inp.shape for inp in config.inputs},  # type: ignore
             in_dtypes={inp.name: inp.data_type for inp in config.inputs},
-            out_shapes={out.name: out.shape for out in config.outputs},
+            out_shapes={out.name: out.shape for out in config.outputs},  # type: ignore
             out_dtypes={out.name: out.data_type for out in config.outputs},
             resize_method={
                 inp.name: inp.calibration.resize_method
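The guard makes a missing shape fail fast with a clear message instead of propagating `None` into the shape dictionaries (hence the `# type: ignore` on the comprehensions). A hypothetical repro, with the config nodes reduced to a dataclass for illustration:

```python
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class Node:
    name: str
    shape: Optional[List[int]]


def validate_shapes(inputs: List[Node], outputs: List[Node]) -> None:
    # Mirrors the guard added in Inferer.from_config above.
    for container, typ_name in zip([inputs, outputs], ["input", "output"]):
        for node in container:
            if node.shape is None:
                raise ValueError(
                    f"Shape for {typ_name} '{node.name}' must be provided."
                )


validate_shapes([Node("images", [1, 3, 256, 256])], [Node("scores", None)])
# -> ValueError: Shape for output 'scores' must be provided.
```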
8 changes: 5 additions & 3 deletions modelconverter/packages/hailo/exporter.py
@@ -63,7 +63,8 @@ def __init__(self, config: SingleStageConfig, output_dir: Path):
         self.optimization_level = config.hailo.optimization_level
         self.compression_level = config.hailo.compression_level
         self.batch_size = config.hailo.batch_size
-        self.early_stop = config.hailo.early_stop
+        self.disable_compilation = config.hailo.disable_compilation
+        self._alls: List[str] = []
         self.hw_arch = config.hailo.hw_arch
         if not tf.config.list_physical_devices("GPU"):
             logger.error(
@@ -113,12 +114,13 @@ def export(self) -> Path:
         har_path = self.input_model.with_suffix(".har")
         runner.save_har(har_path)
         if self._disable_calibration:
+            self._inference_model_path = har_path
             return har_path
 
         quantized_har_path = self._calibrate(har_path)
+        self._inference_model_path = Path(quantized_har_path)
-        if self.early_stop:
-            logger.info("Early stop enabled. Skipping compilation.")
+        if self.disable_compilation:
+            logger.warning("Compilation disabled, skipping compilation.")
         copy_path = Path(quantized_har_path).parent / (
             Path(quantized_har_path).stem + "_copy.har"
         )
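`early_stop` is renamed to the more descriptive `disable_compilation`, and the exporter now records an `_inference_model_path` at every exit point. A condensed sketch of the resulting flow, under the assumption that the copied HAR is what gets returned when compilation is skipped (helper names are hypothetical):

```python
import shutil
from pathlib import Path


def finish_export(quantized_har_path: str, disable_compilation: bool) -> Path:
    quantized = Path(quantized_har_path)
    if disable_compilation:
        # Stop after quantization: return a copy of the quantized HAR
        # rather than compiling it further.
        copy_path = quantized.parent / (quantized.stem + "_copy.har")
        shutil.copy(quantized, copy_path)
        return copy_path
    return compile_model(quantized)  # stands in for the real compilation step


def compile_model(har: Path) -> Path:
    raise NotImplementedError("placeholder for Hailo compilation")
```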
6 changes: 3 additions & 3 deletions modelconverter/packages/rvc2/exporter.py
@@ -21,9 +21,9 @@
 
 logger = getLogger(__name__)
 
-COMPILE_TOOL: Final[str] = (
-    f'{env["INTEL_OPENVINO_DIR"]}/tools/compile_tool/compile_tool'
-)
+COMPILE_TOOL: Final[
+    str
+] = f'{env["INTEL_OPENVINO_DIR"]}/tools/compile_tool/compile_tool'
 
 DEFAULT_SUPER_SHAVES: Final[int] = 8
 
1 change: 1 addition & 0 deletions modelconverter/packages/rvc3/exporter.py
@@ -62,6 +62,7 @@ def export(self) -> Path:
             xml_path,
             output_dir=str(output_dir),
         )
+        self._inference_model_path = calibrated_xml_path
         output_path = (
             self.output_dir
             / f"{self.model_name}-{self.target.name.lower()}-int8"
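Hailo, RVC3, and the base exporter all set `self._inference_model_path` now, which `__main__.py` reads back through `exporter.inference_model_path` when building the NN archive. The property itself is not shown in this diff; a plausible sketch of it on the base `Exporter`:

```python
from pathlib import Path
from typing import Optional


class Exporter:
    _inference_model_path: Optional[Path] = None

    @property
    def inference_model_path(self) -> Path:
        # Guard against reading the path before export() has produced it.
        if self._inference_model_path is None:
            raise RuntimeError("export() has not produced an inference model yet")
        return self._inference_model_path
```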
6 changes: 6 additions & 0 deletions modelconverter/utils/__init__.py
@@ -19,6 +19,8 @@
     upload_file_to_remote,
 )
 from .image import read_calib_dir, read_image
+from .layout import guess_new_layout, make_default_layout
+from .metadata import Metadata, get_metadata
 from .nn_archive import (
     get_archive_input,
     modelconverter_config_to_nn,
@@ -49,4 +51,8 @@
     "get_docker_image",
     "docker_exec",
     "in_docker",
+    "guess_new_layout",
+    "make_default_layout",
+    "Metadata",
+    "get_metadata",
 ]
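The commit's namesake feature lands in the public API here: `guess_new_layout` and `make_default_layout` from the new `layout` module, alongside the metadata helpers. Their implementation is not part of this diff; purely as an assumption from the names, a default-layout helper might look like:

```python
from typing import List


def make_default_layout(shape: List[int]) -> str:
    """Guess a layout string for a shape, e.g. [1, 3, 224, 224] -> "NCHW".

    Hypothetical sketch based only on the function name; the real
    implementation in modelconverter.utils.layout may differ.
    """
    if len(shape) == 4:
        # Assume channels-first when the second dim looks like channels.
        return "NCHW" if shape[1] in (1, 3, 4) else "NHWC"
    if len(shape) == 3:
        return "CHW" if shape[0] in (1, 3, 4) else "HWC"
    if len(shape) == 2:
        return "NC"
    raise ValueError(f"Cannot guess a layout for shape: {shape}")
```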