From b09ca3b20318a6543166f89fe8469b063ab752b5 Mon Sep 17 00:00:00 2001 From: Jerry Guo Date: Fri, 17 Nov 2023 09:54:30 +0100 Subject: [PATCH 1/8] Added an interface and simple test for `get_dataset_batch_size`. Making use of existing utils function in validation. Signed-off-by: Jerry Guo --- src/power_grid_model/utils.py | 17 +++++++++++++++++ tests/unit/test_utils.py | 20 ++++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 905263f22..1f59b2db4 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -23,6 +23,7 @@ ) from power_grid_model.data_types import BatchArray, BatchDataset, Dataset, SingleDataset from power_grid_model.errors import PowerGridSerializationError +from power_grid_model._utils import get_and_verify_batch_sizes _DEPRECATED_FUNCTION_MSG = "This function is deprecated." _DEPRECATED_JSON_DESERIALIZATION_MSG = f"{_DEPRECATED_FUNCTION_MSG} Please use json_deserialize_to_file instead." @@ -54,6 +55,22 @@ def _get_component_scenario(component_scenarios: BatchArray) -> np.ndarray: return {component: _get_component_scenario(component_data) for component, component_data in dataset.items()} +def get_dataset_batch_size(dataset: BatchDataset) -> int: + """ + Get the number of scenarios in the batch dataset. + + Args: + dataset: the batch dataset + + Raises: + ValueError: if the batch dataset is inconsistent. + + Returns: + The size of the batch dataset. Making use of existing _utils function. + """ + return get_and_verify_batch_sizes(dataset) + + def json_deserialize_from_file(file_path: Path) -> Dataset: """ Load and deserialize a JSON file to a new dataset. diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 3cc133afd..028a4a009 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -8,6 +8,9 @@ import numpy as np import pytest +from typing import Dict + +from power_grid_model import LoadGenType, initialize_array from power_grid_model.data_types import Dataset from power_grid_model.utils import ( export_json_data, @@ -16,6 +19,7 @@ json_serialize_to_file, msgpack_deserialize_from_file, msgpack_serialize_to_file, + get_dataset_batch_size, ) @@ -41,6 +45,22 @@ def test_get_dataset_scenario(): get_dataset_scenario(data, 2) +@pytest.fixture +def batch_data() -> Dict[str, np.ndarray]: + line = initialize_array("update", "line", (3, 2)) + line["id"] = [[5, 6], [6, 7], [7, 5]] + line["from_status"] = [[1, 1], [1, 1], [1, 1]] + + # Add batch for asym_load, which has 2-D array for p_specified + asym_load = initialize_array("update", "asym_load", (3, 2)) + asym_load["id"] = [[9, 10], [9, 10], [9, 10]] + + return {"line": line, "asym_load": asym_load} + +def test_get_dataset_batch_size(batch_data): + assert get_dataset_batch_size(batch_data) == 3 + + @patch("builtins.open", new_callable=mock_open) @patch("power_grid_model.utils.json_deserialize") def test_json_deserialize_from_file(deserialize_mock: MagicMock, open_mock: MagicMock): From 712eec2111ec683b62f0b2644be28a328f438e18 Mon Sep 17 00:00:00 2001 From: Jerry Guo Date: Fri, 17 Nov 2023 09:56:56 +0100 Subject: [PATCH 2/8] Reformatted Signed-off-by: Jerry Guo --- src/power_grid_model/utils.py | 2 +- tests/unit/test_utils.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 1f59b2db4..bdac83532 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -64,7 +64,7 @@ def 
get_dataset_batch_size(dataset: BatchDataset) -> int: Raises: ValueError: if the batch dataset is inconsistent. - + Returns: The size of the batch dataset. Making use of existing _utils function. """ diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 028a4a009..0d336c4a8 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -57,9 +57,10 @@ def batch_data() -> Dict[str, np.ndarray]: return {"line": line, "asym_load": asym_load} + def test_get_dataset_batch_size(batch_data): assert get_dataset_batch_size(batch_data) == 3 - + @patch("builtins.open", new_callable=mock_open) @patch("power_grid_model.utils.json_deserialize") From ea38cf24eb92431ddfbb7b7ea54bcce22fa6be4d Mon Sep 17 00:00:00 2001 From: Jerry Guo Date: Fri, 17 Nov 2023 12:21:47 +0100 Subject: [PATCH 3/8] Fix isort and reuse. Under version 1.1.2 for reuse. Plus some formatting. Signed-off-by: Jerry Guo --- .pre-commit-config.yaml | 2 +- src/power_grid_model/utils.py | 2 +- tests/unit/test_utils.py | 5 ++--- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 75c10d9b9..f41de1e35 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ repos: - repo: https://github.com/fsfe/reuse-tool - rev: v1.0.0 + rev: v1.1.2 hooks: - id: reuse - repo: https://github.com/pycqa/isort diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index bdac83532..1e2124703 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -14,6 +14,7 @@ import numpy as np +from power_grid_model._utils import get_and_verify_batch_sizes from power_grid_model.core.power_grid_dataset import get_dataset_type from power_grid_model.core.serialization import ( # pylint: disable=unused-import json_deserialize, @@ -23,7 +24,6 @@ ) from power_grid_model.data_types import BatchArray, BatchDataset, Dataset, SingleDataset from power_grid_model.errors import PowerGridSerializationError -from power_grid_model._utils import get_and_verify_batch_sizes _DEPRECATED_FUNCTION_MSG = "This function is deprecated." _DEPRECATED_JSON_DESERIALIZATION_MSG = f"{_DEPRECATED_FUNCTION_MSG} Please use json_deserialize_to_file instead." diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 0d336c4a8..cc72199c2 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -3,23 +3,22 @@ # SPDX-License-Identifier: MPL-2.0 from pathlib import Path +from typing import Dict from unittest.mock import MagicMock, mock_open, patch import numpy as np import pytest -from typing import Dict - from power_grid_model import LoadGenType, initialize_array from power_grid_model.data_types import Dataset from power_grid_model.utils import ( export_json_data, + get_dataset_batch_size, get_dataset_scenario, json_deserialize_from_file, json_serialize_to_file, msgpack_deserialize_from_file, msgpack_serialize_to_file, - get_dataset_batch_size, ) From 3d77bdc08c8e907452bc2e4994ca233cd9c7eeb4 Mon Sep 17 00:00:00 2001 From: Jerry Guo Date: Fri, 17 Nov 2023 12:25:56 +0100 Subject: [PATCH 4/8] Bumped Reuse to the latest, one that causes no issue. 
Signed-off-by: Jerry Guo --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f41de1e35..3dca57fa6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ repos: - repo: https://github.com/fsfe/reuse-tool - rev: v1.1.2 + rev: v2.1.0 hooks: - id: reuse - repo: https://github.com/pycqa/isort From 7a82743a179dadebf7df0ed5a7583a4053f0882f Mon Sep 17 00:00:00 2001 From: Jerry Guo Date: Fri, 17 Nov 2023 15:52:31 +0100 Subject: [PATCH 5/8] Added a test case for sparse batch dataset. Signed-off-by: Jerry Guo --- src/power_grid_model/utils.py | 4 +++- tests/unit/test_utils.py | 25 +++++++++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 1e2124703..5b334b751 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -15,7 +15,7 @@ import numpy as np from power_grid_model._utils import get_and_verify_batch_sizes -from power_grid_model.core.power_grid_dataset import get_dataset_type +from power_grid_model.core.power_grid_dataset import CConstDataset, get_dataset_type from power_grid_model.core.serialization import ( # pylint: disable=unused-import json_deserialize, json_serialize, @@ -68,6 +68,8 @@ def get_dataset_batch_size(dataset: BatchDataset) -> int: Returns: The size of the batch dataset. Making use of existing _utils function. """ + if isinstance(dataset, CConstDataset): + return dataset.get_info().batch_size() return get_and_verify_batch_sizes(dataset) diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index cc72199c2..7c8fa1e9a 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -10,6 +10,8 @@ import pytest from power_grid_model import LoadGenType, initialize_array +from power_grid_model.core.power_grid_dataset import CConstDataset +from power_grid_model.core.power_grid_meta import power_grid_meta_data from power_grid_model.data_types import Dataset from power_grid_model.utils import ( export_json_data, @@ -61,6 +63,29 @@ def test_get_dataset_batch_size(batch_data): assert get_dataset_batch_size(batch_data) == 3 +def test_get_dataset_batch_size_sparse(): + batch_size = 3 + data = { + "node": { + "data": np.zeros(shape=3, dtype=power_grid_meta_data["input"]["node"]), + "indptr": np.array([0, 2, 3, 3]), + }, + "sym_load": { + "data": np.zeros(shape=2, dtype=power_grid_meta_data["input"]["sym_load"]), + "indptr": np.array([0, 0, 1, 2]), + }, + "asym_load": { + "data": np.zeros(shape=4, dtype=power_grid_meta_data["input"]["asym_load"]), + "indptr": np.array([0, 2, 3, 4]), + }, + "link": np.zeros(shape=(batch_size, 4), dtype=power_grid_meta_data["input"]["link"]), + } + + dataset = CConstDataset(data, ["input"]) + + assert get_dataset_batch_size(dataset) == 3 + + @patch("builtins.open", new_callable=mock_open) @patch("power_grid_model.utils.json_deserialize") def test_json_deserialize_from_file(deserialize_mock: MagicMock, open_mock: MagicMock): From 8549d5165f2b3ff655cd5d5582703425accda4c1 Mon Sep 17 00:00:00 2001 From: Jerry Guo Date: Thu, 23 Nov 2023 09:31:14 +0100 Subject: [PATCH 6/8] Added test cases to verify the `ValueError` in case of inconsistent batch size across components; removed `CConstDataset` interface, use `Dict[str, ndarray]` instead for sparse batch data creation; included `batch_data_array` size functionality and test cases; added interface import to the api reference. 
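A minimal usage sketch of the helpers as named in this patch (the example values are illustrative and mirror the unit tests added here; the functions are renamed in a later patch):

    import numpy as np

    from power_grid_model import initialize_array
    from power_grid_model.core.power_grid_meta import power_grid_meta_data
    from power_grid_model.utils import get_data_array_batch_size, get_data_set_batch_size

    # dense batch data: 3 scenarios with 2 lines each
    line = initialize_array("update", "line", (3, 2))
    line["id"] = [[5, 6], [6, 7], [7, 5]]

    # sparse batch data: the indptr marks 3 scenarios over 2 stored records
    sym_load = {
        "data": np.zeros(shape=2, dtype=power_grid_meta_data["input"]["sym_load"]),
        "indptr": np.array([0, 0, 1, 2]),
    }

    assert get_data_set_batch_size({"line": line}) == 3          # dense batch dataset
    assert get_data_set_batch_size({"sym_load": sym_load}) == 3  # sparse batch dataset
    assert get_data_array_batch_size(line) == 3                  # single component array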
Signed-off-by: Jerry Guo --- docs/api_reference/python-api-reference.md | 2 + src/power_grid_model/utils.py | 21 ++++++-- tests/unit/test_utils.py | 59 +++++++++++++++++----- 3 files changed, 64 insertions(+), 18 deletions(-) diff --git a/docs/api_reference/python-api-reference.md b/docs/api_reference/python-api-reference.md index fa69b6e2f..b5bf1197c 100644 --- a/docs/api_reference/python-api-reference.md +++ b/docs/api_reference/python-api-reference.md @@ -52,4 +52,6 @@ SPDX-License-Identifier: MPL-2.0 .. autofunction:: power_grid_model.utils.msgpack_serialize_to_file .. autofunction:: power_grid_model.utils.import_json_data .. autofunction:: power_grid_model.utils.export_json_data +.. autofunction:: power_grid_model._utils.get_and_verify_batch_sizes +.. autofunction:: power_grid_model._utils.get_batch_size ``` diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 5b334b751..dc1b3abb3 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -14,8 +14,8 @@ import numpy as np -from power_grid_model._utils import get_and_verify_batch_sizes -from power_grid_model.core.power_grid_dataset import CConstDataset, get_dataset_type +from power_grid_model._utils import get_and_verify_batch_sizes, get_batch_size # pylint: disable=unused-import +from power_grid_model.core.power_grid_dataset import get_dataset_type from power_grid_model.core.serialization import ( # pylint: disable=unused-import json_deserialize, json_serialize, @@ -55,7 +55,7 @@ def _get_component_scenario(component_scenarios: BatchArray) -> np.ndarray: return {component: _get_component_scenario(component_data) for component, component_data in dataset.items()} -def get_dataset_batch_size(dataset: BatchDataset) -> int: +def get_data_set_batch_size(dataset: BatchDataset) -> int: """ Get the number of scenarios in the batch dataset. @@ -68,11 +68,22 @@ def get_dataset_batch_size(dataset: BatchDataset) -> int: Returns: The size of the batch dataset. Making use of existing _utils function. """ - if isinstance(dataset, CConstDataset): - return dataset.get_info().batch_size() return get_and_verify_batch_sizes(dataset) +def get_data_array_batch_size(data_array: BatchArray) -> int: + """ + Determine the number of batches and verify the data structure + + Args: + data_array: a batch array for power-grid-model + + Returns: + The number of batches in data_array + """ + return get_batch_size(data_array) + + def json_deserialize_from_file(file_path: Path) -> Dataset: """ Load and deserialize a JSON file to a new dataset. 
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 7c8fa1e9a..cd0a19ef5 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -10,12 +10,12 @@ import pytest from power_grid_model import LoadGenType, initialize_array -from power_grid_model.core.power_grid_dataset import CConstDataset from power_grid_model.core.power_grid_meta import power_grid_meta_data from power_grid_model.data_types import Dataset from power_grid_model.utils import ( export_json_data, - get_dataset_batch_size, + get_data_array_batch_size, + get_data_set_batch_size, get_dataset_scenario, json_deserialize_from_file, json_serialize_to_file, @@ -46,25 +46,20 @@ def test_get_dataset_scenario(): get_dataset_scenario(data, 2) -@pytest.fixture -def batch_data() -> Dict[str, np.ndarray]: +def test_get_data_set_batch_size(): line = initialize_array("update", "line", (3, 2)) line["id"] = [[5, 6], [6, 7], [7, 5]] line["from_status"] = [[1, 1], [1, 1], [1, 1]] - # Add batch for asym_load, which has 2-D array for p_specified asym_load = initialize_array("update", "asym_load", (3, 2)) asym_load["id"] = [[9, 10], [9, 10], [9, 10]] - return {"line": line, "asym_load": asym_load} + batch_data = {"line": line, "asym_load": asym_load} - -def test_get_dataset_batch_size(batch_data): - assert get_dataset_batch_size(batch_data) == 3 + assert get_data_set_batch_size(batch_data) == 3 def test_get_dataset_batch_size_sparse(): - batch_size = 3 data = { "node": { "data": np.zeros(shape=3, dtype=power_grid_meta_data["input"]["node"]), @@ -78,12 +73,50 @@ def test_get_dataset_batch_size_sparse(): "data": np.zeros(shape=4, dtype=power_grid_meta_data["input"]["asym_load"]), "indptr": np.array([0, 2, 3, 4]), }, - "link": np.zeros(shape=(batch_size, 4), dtype=power_grid_meta_data["input"]["link"]), } - dataset = CConstDataset(data, ["input"]) + assert get_data_set_batch_size(data) == 3 + + +def test_get_dataset_batch_size_mixed(): + line = initialize_array("update", "line", (3, 2)) + line["id"] = [[5, 6], [6, 7], [7, 5]] + line["from_status"] = [[1, 1], [1, 1], [1, 1]] + + asym_load = initialize_array("update", "asym_load", (2, 2)) + asym_load["id"] = [[9, 10], [9, 10]] + + data_dense = {"line": line, "asym_load": asym_load} + data_sparse = { + "node": { + "data": np.zeros(shape=3, dtype=power_grid_meta_data["input"]["node"]), + "indptr": np.array([0, 2, 3, 3, 5]), + }, + "sym_load": { + "data": np.zeros(shape=2, dtype=power_grid_meta_data["input"]["sym_load"]), + "indptr": np.array([0, 0, 1, 2]), + }, + "asym_load": { + "data": np.zeros(shape=4, dtype=power_grid_meta_data["input"]["asym_load"]), + "indptr": np.array([0, 2, 3]), + }, + } + with pytest.raises(ValueError): + get_data_set_batch_size(data_dense) + with pytest.raises(ValueError): + get_data_set_batch_size(data_sparse) + + +def test_get_data_array_batch_size(): + asym_load = initialize_array("update", "asym_load", (3, 2)) + asym_load["id"] = [[9, 10], [9, 10], [9, 10]] - assert get_dataset_batch_size(dataset) == 3 + sym_load = { + "data": np.zeros(shape=2, dtype=power_grid_meta_data["input"]["sym_load"]), + "indptr": np.array([0, 0, 1, 2]), + } + assert get_data_array_batch_size(asym_load) == 3 + assert get_data_array_batch_size(sym_load) == 3 @patch("builtins.open", new_callable=mock_open) From acd1fd1e125cf0f984beb15afbc01a1391859180 Mon Sep 17 00:00:00 2001 From: Jerry Guo Date: Fri, 24 Nov 2023 10:36:20 +0100 Subject: [PATCH 7/8] Final touch: rename `data_set` to `dataset`; `data_array` to `component`; etc Signed-off-by: Jerry Guo --- 
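Usage note (below the "---", so not part of the commit message): a minimal sketch of the renamed public API; the example values mirror the unit tests in this patch.

    from power_grid_model import initialize_array
    from power_grid_model.utils import get_component_batch_size, get_dataset_batch_size

    # 3 scenarios, 2 asym_loads updated per scenario
    asym_load = initialize_array("update", "asym_load", (3, 2))
    asym_load["id"] = [[9, 10], [9, 10], [9, 10]]

    assert get_dataset_batch_size({"asym_load": asym_load}) == 3
    assert get_component_batch_size(asym_load) == 3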
src/power_grid_model/utils.py | 11 ++++++----- tests/unit/test_utils.py | 18 +++++++++--------- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index dc1b3abb3..b77d52d94 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -14,7 +14,8 @@ import numpy as np -from power_grid_model._utils import get_and_verify_batch_sizes, get_batch_size # pylint: disable=unused-import +from power_grid_model._utils import get_and_verify_batch_sizes as _get_and_verify_batch_sizes +from power_grid_model._utils import get_batch_size as _get_batch_size from power_grid_model.core.power_grid_dataset import get_dataset_type from power_grid_model.core.serialization import ( # pylint: disable=unused-import json_deserialize, @@ -55,7 +56,7 @@ def _get_component_scenario(component_scenarios: BatchArray) -> np.ndarray: return {component: _get_component_scenario(component_data) for component, component_data in dataset.items()} -def get_data_set_batch_size(dataset: BatchDataset) -> int: +def get_dataset_batch_size(dataset: BatchDataset) -> int: """ Get the number of scenarios in the batch dataset. @@ -68,10 +69,10 @@ def get_data_set_batch_size(dataset: BatchDataset) -> int: Returns: The size of the batch dataset. Making use of existing _utils function. """ - return get_and_verify_batch_sizes(dataset) + return _get_and_verify_batch_sizes(dataset) -def get_data_array_batch_size(data_array: BatchArray) -> int: +def get_component_batch_size(data_array: BatchArray) -> int: """ Determine the number of batches and verify the data structure @@ -81,7 +82,7 @@ def get_data_array_batch_size(data_array: BatchArray) -> int: Returns: The number of batches in data_array """ - return get_batch_size(data_array) + return _get_batch_size(data_array) def json_deserialize_from_file(file_path: Path) -> Dataset: diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index cd0a19ef5..e297adeff 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -14,8 +14,8 @@ from power_grid_model.data_types import Dataset from power_grid_model.utils import ( export_json_data, - get_data_array_batch_size, - get_data_set_batch_size, + get_component_batch_size, + get_dataset_batch_size, get_dataset_scenario, json_deserialize_from_file, json_serialize_to_file, @@ -56,7 +56,7 @@ def test_get_data_set_batch_size(): batch_data = {"line": line, "asym_load": asym_load} - assert get_data_set_batch_size(batch_data) == 3 + assert get_dataset_batch_size(batch_data) == 3 def test_get_dataset_batch_size_sparse(): @@ -75,7 +75,7 @@ def test_get_dataset_batch_size_sparse(): }, } - assert get_data_set_batch_size(data) == 3 + assert get_dataset_batch_size(data) == 3 def test_get_dataset_batch_size_mixed(): @@ -102,12 +102,12 @@ def test_get_dataset_batch_size_mixed(): }, } with pytest.raises(ValueError): - get_data_set_batch_size(data_dense) + get_dataset_batch_size(data_dense) with pytest.raises(ValueError): - get_data_set_batch_size(data_sparse) + get_dataset_batch_size(data_sparse) -def test_get_data_array_batch_size(): +def test_get_component_batch_size(): asym_load = initialize_array("update", "asym_load", (3, 2)) asym_load["id"] = [[9, 10], [9, 10], [9, 10]] @@ -115,8 +115,8 @@ def test_get_data_array_batch_size(): "data": np.zeros(shape=2, dtype=power_grid_meta_data["input"]["sym_load"]), "indptr": np.array([0, 0, 1, 2]), } - assert get_data_array_batch_size(asym_load) == 3 - assert get_data_array_batch_size(sym_load) == 3 + assert 
get_component_batch_size(asym_load) == 3 + assert get_component_batch_size(sym_load) == 3 @patch("builtins.open", new_callable=mock_open) From eefa93c4704bc6369a354acb4c3bccf0d6fc3e81 Mon Sep 17 00:00:00 2001 From: Jerry Guo Date: Fri, 24 Nov 2023 11:01:45 +0100 Subject: [PATCH 8/8] Fixed isort config, it fixed multiple other files Signed-off-by: Jerry Guo --- pyproject.toml | 1 + src/power_grid_model/core/options.py | 3 +-- src/power_grid_model/core/power_grid_dataset.py | 9 +++++++-- src/power_grid_model/core/power_grid_meta.py | 3 +-- src/power_grid_model/core/power_grid_model.py | 3 +-- src/power_grid_model/core/serialization.py | 9 +++++++-- src/power_grid_model/utils.py | 9 +++++---- 7 files changed, 23 insertions(+), 14 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 206a8b258..52e1bac7b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,6 +83,7 @@ target-version = ['py38'] [tool.isort] profile = "black" +combine_as_imports = true line_length = 120 [tool.pylint] diff --git a/src/power_grid_model/core/options.py b/src/power_grid_model/core/options.py index 86c37f83f..14982b1e0 100644 --- a/src/power_grid_model/core/options.py +++ b/src/power_grid_model/core/options.py @@ -8,8 +8,7 @@ """ from typing import Any, Callable -from power_grid_model.core.power_grid_core import OptionsPtr -from power_grid_model.core.power_grid_core import power_grid_core as pgc +from power_grid_model.core.power_grid_core import OptionsPtr, power_grid_core as pgc class OptionSetter: diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py index 157081174..b2c5b9ce9 100644 --- a/src/power_grid_model/core/power_grid_dataset.py +++ b/src/power_grid_model/core/power_grid_dataset.py @@ -19,8 +19,13 @@ get_buffer_view, ) from power_grid_model.core.error_handling import VALIDATOR_MSG, assert_no_error -from power_grid_model.core.power_grid_core import ConstDatasetPtr, DatasetInfoPtr, MutableDatasetPtr, WritableDatasetPtr -from power_grid_model.core.power_grid_core import power_grid_core as pgc +from power_grid_model.core.power_grid_core import ( + ConstDatasetPtr, + DatasetInfoPtr, + MutableDatasetPtr, + WritableDatasetPtr, + power_grid_core as pgc, +) from power_grid_model.core.power_grid_meta import DatasetMetaData, power_grid_meta_data from power_grid_model.errors import PowerGridError diff --git a/src/power_grid_model/core/power_grid_meta.py b/src/power_grid_model/core/power_grid_meta.py index 7e197b4f3..ec5fb6d24 100644 --- a/src/power_grid_model/core/power_grid_meta.py +++ b/src/power_grid_model/core/power_grid_meta.py @@ -12,8 +12,7 @@ import numpy as np -from power_grid_model.core.power_grid_core import AttributePtr, ComponentPtr, DatasetPtr -from power_grid_model.core.power_grid_core import power_grid_core as pgc +from power_grid_model.core.power_grid_core import AttributePtr, ComponentPtr, DatasetPtr, power_grid_core as pgc # constant enum for ctype diff --git a/src/power_grid_model/core/power_grid_model.py b/src/power_grid_model/core/power_grid_model.py index 07f674f74..d03972bfd 100644 --- a/src/power_grid_model/core/power_grid_model.py +++ b/src/power_grid_model/core/power_grid_model.py @@ -21,8 +21,7 @@ from power_grid_model.core.error_handling import PowerGridBatchError, assert_no_error, handle_errors from power_grid_model.core.index_integer import IdNp, IdxNp from power_grid_model.core.options import Options -from power_grid_model.core.power_grid_core import ConstDatasetPtr, IDPtr, IdxPtr, ModelPtr -from 
power_grid_model.core.power_grid_core import power_grid_core as pgc +from power_grid_model.core.power_grid_core import ConstDatasetPtr, IDPtr, IdxPtr, ModelPtr, power_grid_core as pgc from power_grid_model.enum import CalculationMethod, CalculationType, ShortCircuitVoltageScaling diff --git a/src/power_grid_model/core/serialization.py b/src/power_grid_model/core/serialization.py index b1c9c6711..ef976bea4 100644 --- a/src/power_grid_model/core/serialization.py +++ b/src/power_grid_model/core/serialization.py @@ -16,8 +16,13 @@ from power_grid_model.core.error_handling import assert_no_error from power_grid_model.core.index_integer import IdxC -from power_grid_model.core.power_grid_core import CharPtr, DeserializerPtr, SerializerPtr, WritableDatasetPtr -from power_grid_model.core.power_grid_core import power_grid_core as pgc +from power_grid_model.core.power_grid_core import ( + CharPtr, + DeserializerPtr, + SerializerPtr, + WritableDatasetPtr, + power_grid_core as pgc, +) from power_grid_model.core.power_grid_dataset import CConstDataset, CWritableDataset from power_grid_model.errors import PowerGridSerializationError diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index b77d52d94..579290230 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -9,13 +9,14 @@ import json import warnings from pathlib import Path -from typing import Optional -from typing import cast as cast_type +from typing import Optional, cast as cast_type import numpy as np -from power_grid_model._utils import get_and_verify_batch_sizes as _get_and_verify_batch_sizes -from power_grid_model._utils import get_batch_size as _get_batch_size +from power_grid_model._utils import ( + get_and_verify_batch_sizes as _get_and_verify_batch_sizes, + get_batch_size as _get_batch_size, +) from power_grid_model.core.power_grid_dataset import get_dataset_type from power_grid_model.core.serialization import ( # pylint: disable=unused-import json_deserialize,