From 8905f0093ef2cb932e76674102c7e4815648d460 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Thu, 13 Jun 2024 18:01:04 +0200 Subject: [PATCH 01/47] Added dataset and component types. Signed-off-by: Santiago Figueroa --- src/power_grid_model/data_types.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/src/power_grid_model/data_types.py b/src/power_grid_model/data_types.py index 875cfb0e3..d946a67d6 100644 --- a/src/power_grid_model/data_types.py +++ b/src/power_grid_model/data_types.py @@ -7,10 +7,13 @@ have been defined and explained in this file. """ +from enum import Enum from typing import Dict, List, Tuple, Union import numpy as np +from power_grid_model import power_grid_meta_data + # When we're dropping python 3.8, we should introduce proper NumPy type hinting SingleArray = Union[np.ndarray] @@ -186,3 +189,30 @@ [{"line": [{"id": 3, "from_status": 0, "to_status": 0, ...}],}, {"line": [{"id": 3, "from_status": 1, "to_status": 1, ...}],}] """ + +# comply with mypy +PowerGridDataTypes = Enum( # type: ignore + "PowerGridDataTypes", {data_type: data_type for data_type in power_grid_meta_data} # type: ignore +) # type: ignore +""" +Types of single/batch datasets: + + - input: Dataset with attributes relevant to the grid configuration (e.g. id, from_node, from_status). + + - update: Dataset with attributes relevant to multiple scenarios (e.g. from_status, to_status). + + - sym_output: Dataset with attributes relevant to symmetrical steady state output of power flow or state estimation + calculation (e.g. p_from, p_to). + + - asym_output: Dataset with attributes relevant to asymmetrical steady state output of power flow or state estimation + calculation (e.g. p_from, p_to). + + - sc_output: Contains attributes relevant to symmetrical short circuit calculation output. Like for the asym_output, + detailed data for all 3 phases will be provided where relevant (e.g. i_from, i_from_angle). +""" + +# to comply with mypy +PowerGridComponents = Enum( # type: ignore + "PowerGridComponents", {component: component for component in power_grid_meta_data["input"]} # type: ignore +) # type: ignore +"""Grid component types.""" From ed55a936359aa995272bf10c54564f7a5bd477bc Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Mon, 17 Jun 2024 16:31:28 +0200 Subject: [PATCH 02/47] Generated code for dataset and component types. 
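A minimal sketch of the rendering step this patch automates (abridged: the real template is metadata_enums.py.jinja below, and the dataset names come from the deserialized attribute JSON, not the hard-coded list used here):

    from jinja2 import Template

    # Abridged template: emit one Enum member per dataset type.
    TEMPLATE = '''class PowerGridDataTypes(Enum):
    {%- for dataset_type in dataset_types %}
        {{ dataset_type }} = "{{ dataset_type }}"
    {%- endfor %}
    '''

    # Template datasets are expanded with "sym_"/"asym_" prefixes,
    # mirroring render_metadata_enums in this patch.
    dataset_types = ["input", "update", "sym_output", "asym_output", "sc_output"]
    print(Template(TEMPLATE).render(dataset_types=dataset_types))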
Signed-off-by: Santiago Figueroa --- code_generation/code_gen.py | 28 +++++++ .../power_grid_model/metadata_enums.py.jinja | 41 ++++++++++ src/power_grid_model/dataset_definitions.py | 77 +++++++++++++++++++ 3 files changed, 146 insertions(+) create mode 100644 code_generation/templates/src/power_grid_model/metadata_enums.py.jinja create mode 100644 src/power_grid_model/dataset_definitions.py diff --git a/code_generation/code_gen.py b/code_generation/code_gen.py index 73e6d3423..c2a490b45 100644 --- a/code_generation/code_gen.py +++ b/code_generation/code_gen.py @@ -117,10 +117,36 @@ def render_dataset_class_maps(self, template_path: Path, data_path: Path, output self.render_template(template_path=template_path, output_path=output_path, all_map=all_map) + def render_metadata_enums(self, template_path: Path, data_path: Path, output_path: Path): + with open(data_path) as data_file: + json_data = data_file.read() + dataset_meta_data: List[DatasetMapData] = AllDatasetMapData.schema().loads(json_data).all_datasets + + dataset_types = [] + components = [] + for dataset in dataset_meta_data: + if dataset.is_template: + prefixes = ["sym_", "asym_"] + else: + prefixes = [""] + for prefix in prefixes: + dataset_types.append(f"{prefix}{dataset.name}") + + if dataset.name == "input": + for component in dataset.components: + components.append(component.names) + + components = [name for sublist in components for name in sublist] + + self.render_template( + template_path=template_path, output_path=output_path, dataset_types=dataset_types, components=components + ) + def code_gen(self): render_funcs = { "attribute_classes": self.render_attribute_classes, "dataset_class_maps": self.render_dataset_class_maps, + "metadata_enums": self.render_metadata_enums, } # render attribute classes @@ -128,6 +154,8 @@ def code_gen(self): for template_path in TEMPLATE_DIR.rglob(f"{template_name}.*.jinja"): output_suffix = template_path.with_suffix("").suffix output_dir = template_path.parent.relative_to(TEMPLATE_DIR) + if template_name == "metadata_enums": + template_name = "dataset_class_maps" # To use existing data. for data_path in DATA_DIR.glob(f"{template_name}/*.json"): output_path = self.base_output_path / output_dir / data_path.with_suffix(output_suffix).name output_path.parent.mkdir(parents=True, exist_ok=True) diff --git a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja new file mode 100644 index 000000000..6fb62bcf4 --- /dev/null +++ b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja @@ -0,0 +1,41 @@ +# SPDX-FileCopyrightText: Contributors to the Power Grid Model project +# +# SPDX-License-Identifier: MPL-2.0 + +# This header file is automatically generated. DO NOT modify it manually! + +"""Enums for PMG dataset and component types.""" + +from enum import Enum + +# Value names are defined in lower case instead of upper case +# pylint: disable=invalid-name + +class PowerGridDataTypes(Enum): + """ + Types of single/batch datasets: + + - input: Dataset with attributes relevant to the grid configuration (e.g. id, from_node, from_status). + + - update: Dataset with attributes relevant to multiple scenarios (e.g. from_status, to_status). + + - sym_output: Dataset with attributes relevant to symmetrical steady state output of power flow or state + estimation calculation (e.g. p_from, p_to). 
+ + - asym_output: Dataset with attributes relevant to asymmetrical steady state output of power flow or state + estimation calculation (e.g. p_from, p_to). + + - sc_output: Contains attributes relevant to symmetrical short circuit calculation output. Like for the + asym_output, detailed data for all 3 phases will be provided where relevant (e.g. i_from, i_from_angle). + """ + + {% for dataset_type in dataset_types %} + {{ dataset_type }} = "{{ dataset_type }}" + {% endfor %} + +class PowerGridComponents(Enum): + """Grid component types.""" + + {% for component in components %} + {{ component }} = "{{ component }}" + {% endfor %} diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py new file mode 100644 index 000000000..74185a48c --- /dev/null +++ b/src/power_grid_model/dataset_definitions.py @@ -0,0 +1,77 @@ +# SPDX-FileCopyrightText: Contributors to the Power Grid Model project +# +# SPDX-License-Identifier: MPL-2.0 + +# This header file is automatically generated. DO NOT modify it manually! + +"""Enums for PMG dataset and component types.""" + +from enum import Enum + +# Value names are defined in lower case instead of upper case +# pylint: disable=invalid-name + + +class PowerGridDataTypes(Enum): + """ + Types of single/batch datasets: + + - input: Dataset with attributes relevant to the grid configuration (e.g. id, from_node, from_status). + + - update: Dataset with attributes relevant to multiple scenarios (e.g. from_status, to_status). + + - sym_output: Dataset with attributes relevant to symmetrical steady state output of power flow or state + estimation calculation (e.g. p_from, p_to). + + - asym_output: Dataset with attributes relevant to asymmetrical steady state output of power flow or state + estimation calculation (e.g. p_from, p_to). + + - sc_output: Contains attributes relevant to symmetrical short circuit calculation output. Like for the + asym_output, detailed data for all 3 phases will be provided where relevant (e.g. i_from, i_from_angle). + """ + + input = "input" + + sym_output = "sym_output" + + asym_output = "asym_output" + + update = "update" + + sc_output = "sc_output" + + +class PowerGridComponents(Enum): + """Grid component types.""" + + node = "node" + + line = "line" + + link = "link" + + transformer = "transformer" + + three_winding_transformer = "three_winding_transformer" + + sym_load = "sym_load" + + sym_gen = "sym_gen" + + asym_load = "asym_load" + + asym_gen = "asym_gen" + + shunt = "shunt" + + source = "source" + + sym_voltage_sensor = "sym_voltage_sensor" + + asym_voltage_sensor = "asym_voltage_sensor" + + sym_power_sensor = "sym_power_sensor" + + asym_power_sensor = "asym_power_sensor" + + fault = "fault" From e5c9f5daf1fcc59f1717f4080412b78c19093184 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Mon, 17 Jun 2024 16:33:42 +0200 Subject: [PATCH 03/47] Exposed in init. 
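With this re-export in place, downstream code can import the generated types from the package root instead of reaching into dataset_definitions (illustrative usage, assuming the built package is importable):

    from power_grid_model import PowerGridComponents, PowerGridDataTypes

    assert PowerGridDataTypes.input.value == "input"
    assert PowerGridComponents.node.value == "node"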
Signed-off-by: Santiago Figueroa --- src/power_grid_model/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/power_grid_model/__init__.py b/src/power_grid_model/__init__.py index f0126da1e..0d516b436 100644 --- a/src/power_grid_model/__init__.py +++ b/src/power_grid_model/__init__.py @@ -6,6 +6,7 @@ from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data from power_grid_model.core.power_grid_model import PowerGridModel +from power_grid_model.dataset_definitions import PowerGridComponents, PowerGridDataTypes from power_grid_model.enum import ( Branch3Side, BranchSide, From 3d0f3a16f9fca88047da3e844a47e37a5d34c69c Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Mon, 17 Jun 2024 21:57:05 +0200 Subject: [PATCH 04/47] (temporary) fix: import enum. Signed-off-by: Santiago Figueroa --- .../src/power_grid_model/metadata_enums.py.jinja | 9 +++++++++ src/power_grid_model/dataset_definitions.py | 8 ++++++++ 2 files changed, 17 insertions(+) diff --git a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja index 6fb62bcf4..e8752d70b 100644 --- a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja @@ -5,12 +5,21 @@ # This header file is automatically generated. DO NOT modify it manually! """Enums for PMG dataset and component types.""" +import sys +# pylint: disable=wrong-import-position + +sys_path = sys.path.pop(0) from enum import Enum +# pylint: enable=wrong-import-position + +sys.path.append(sys_path) + # Value names are defined in lower case instead of upper case # pylint: disable=invalid-name + class PowerGridDataTypes(Enum): """ Types of single/batch datasets: diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py index 74185a48c..9963e63de 100644 --- a/src/power_grid_model/dataset_definitions.py +++ b/src/power_grid_model/dataset_definitions.py @@ -5,9 +5,17 @@ # This header file is automatically generated. DO NOT modify it manually! 
"""Enums for PMG dataset and component types.""" +import sys +# pylint: disable=wrong-import-position + +sys_path = sys.path.pop(0) from enum import Enum +# pylint: enable=wrong-import-position + +sys.path.append(sys_path) + # Value names are defined in lower case instead of upper case # pylint: disable=invalid-name From dfea0500098896adfb95e243344209d0d9a43cc2 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 18 Jun 2024 09:10:37 +0200 Subject: [PATCH 05/47] Removed Enum, added Literal (mypy supported) Signed-off-by: Santiago Figueroa --- code_generation/code_gen.py | 4 +- .../power_grid_model/metadata_enums.py.jinja | 53 ++------- src/power_grid_model/dataset_definitions.py | 109 +++++------------- 3 files changed, 44 insertions(+), 122 deletions(-) diff --git a/code_generation/code_gen.py b/code_generation/code_gen.py index c2a490b45..9234ccd84 100644 --- a/code_generation/code_gen.py +++ b/code_generation/code_gen.py @@ -117,7 +117,7 @@ def render_dataset_class_maps(self, template_path: Path, data_path: Path, output self.render_template(template_path=template_path, output_path=output_path, all_map=all_map) - def render_metadata_enums(self, template_path: Path, data_path: Path, output_path: Path): + def render_metadata_types(self, template_path: Path, data_path: Path, output_path: Path): with open(data_path) as data_file: json_data = data_file.read() dataset_meta_data: List[DatasetMapData] = AllDatasetMapData.schema().loads(json_data).all_datasets @@ -146,7 +146,7 @@ def code_gen(self): render_funcs = { "attribute_classes": self.render_attribute_classes, "dataset_class_maps": self.render_dataset_class_maps, - "metadata_enums": self.render_metadata_enums, + "metadata_enums": self.render_metadata_types, } # render attribute classes diff --git a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja index e8752d70b..94c91d990 100644 --- a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja @@ -4,47 +4,18 @@ # This header file is automatically generated. DO NOT modify it manually! -"""Enums for PMG dataset and component types.""" -import sys +"""Data types for power grid model dataset and component types.""" -# pylint: disable=wrong-import-position +from typing import Literal -sys_path = sys.path.pop(0) -from enum import Enum +PowerGridDataTypes = Literal[ +{% for dataset_type in dataset_types %} + "{{ dataset_type }}", +{% endfor %} +] -# pylint: enable=wrong-import-position - -sys.path.append(sys_path) - -# Value names are defined in lower case instead of upper case -# pylint: disable=invalid-name - - -class PowerGridDataTypes(Enum): - """ - Types of single/batch datasets: - - - input: Dataset with attributes relevant to the grid configuration (e.g. id, from_node, from_status). - - - update: Dataset with attributes relevant to multiple scenarios (e.g. from_status, to_status). - - - sym_output: Dataset with attributes relevant to symmetrical steady state output of power flow or state - estimation calculation (e.g. p_from, p_to). - - - asym_output: Dataset with attributes relevant to asymmetrical steady state output of power flow or state - estimation calculation (e.g. p_from, p_to). - - - sc_output: Contains attributes relevant to symmetrical short circuit calculation output. Like for the - asym_output, detailed data for all 3 phases will be provided where relevant (e.g. i_from, i_from_angle). 
- """ - - {% for dataset_type in dataset_types %} - {{ dataset_type }} = "{{ dataset_type }}" - {% endfor %} - -class PowerGridComponents(Enum): - """Grid component types.""" - - {% for component in components %} - {{ component }} = "{{ component }}" - {% endfor %} +PowerGridComponents = Literal[ +{% for component in components %} + "{{ component }}", +{% endfor %} +] diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py index 9963e63de..9c2fa879f 100644 --- a/src/power_grid_model/dataset_definitions.py +++ b/src/power_grid_model/dataset_definitions.py @@ -4,82 +4,33 @@ # This header file is automatically generated. DO NOT modify it manually! -"""Enums for PMG dataset and component types.""" -import sys - -# pylint: disable=wrong-import-position - -sys_path = sys.path.pop(0) -from enum import Enum - -# pylint: enable=wrong-import-position - -sys.path.append(sys_path) - -# Value names are defined in lower case instead of upper case -# pylint: disable=invalid-name - - -class PowerGridDataTypes(Enum): - """ - Types of single/batch datasets: - - - input: Dataset with attributes relevant to the grid configuration (e.g. id, from_node, from_status). - - - update: Dataset with attributes relevant to multiple scenarios (e.g. from_status, to_status). - - - sym_output: Dataset with attributes relevant to symmetrical steady state output of power flow or state - estimation calculation (e.g. p_from, p_to). - - - asym_output: Dataset with attributes relevant to asymmetrical steady state output of power flow or state - estimation calculation (e.g. p_from, p_to). - - - sc_output: Contains attributes relevant to symmetrical short circuit calculation output. Like for the - asym_output, detailed data for all 3 phases will be provided where relevant (e.g. i_from, i_from_angle). - """ - - input = "input" - - sym_output = "sym_output" - - asym_output = "asym_output" - - update = "update" - - sc_output = "sc_output" - - -class PowerGridComponents(Enum): - """Grid component types.""" - - node = "node" - - line = "line" - - link = "link" - - transformer = "transformer" - - three_winding_transformer = "three_winding_transformer" - - sym_load = "sym_load" - - sym_gen = "sym_gen" - - asym_load = "asym_load" - - asym_gen = "asym_gen" - - shunt = "shunt" - - source = "source" - - sym_voltage_sensor = "sym_voltage_sensor" - - asym_voltage_sensor = "asym_voltage_sensor" - - sym_power_sensor = "sym_power_sensor" - - asym_power_sensor = "asym_power_sensor" - - fault = "fault" +"""Data types for power grid model dataset and component types.""" + +from typing import Literal + +PowerGridDataTypes = Literal[ + "input", + "sym_output", + "asym_output", + "update", + "sc_output", +] + +PowerGridComponents = Literal[ + "node", + "line", + "link", + "transformer", + "three_winding_transformer", + "sym_load", + "sym_gen", + "asym_load", + "asym_gen", + "shunt", + "source", + "sym_voltage_sensor", + "asym_voltage_sensor", + "sym_power_sensor", + "asym_power_sensor", + "fault", +] From f42085a96c12b41f4c4ad5adab1f1c2cb0402931 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 18 Jun 2024 09:34:14 +0200 Subject: [PATCH 06/47] Updated functions where data_type is used. 
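The annotation change is purely static: mypy now rejects dataset names outside the Literal, while runtime behaviour is unchanged. A self-contained sketch of the pattern (stub body; the real nan_type looks the value up in power_grid_meta_data):

    from typing import Literal

    PowerGridDataTypes = Literal["input", "update", "sym_output", "asym_output", "sc_output"]

    def nan_type(component: str, field: str, data_type: PowerGridDataTypes = "input") -> float:
        # Stub: the real helper returns the NaN sentinel for the field's dtype.
        return float("nan")

    nan_type("node", "u_rated", "input")   # accepted by mypy
    nan_type("node", "u_rated", "output")  # rejected by mypy, though it still runs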
Signed-off-by: Santiago Figueroa --- src/power_grid_model/utils.py | 10 +++++++--- src/power_grid_model/validation/utils.py | 3 ++- src/power_grid_model/validation/validation.py | 3 ++- tests/unit/utils.py | 3 ++- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index a54d7815e..20c1b7b4f 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -25,6 +25,7 @@ msgpack_serialize, ) from power_grid_model.data_types import BatchArray, BatchDataset, Dataset, SingleDataset +from power_grid_model.dataset_definitions import PowerGridDataTypes from power_grid_model.errors import PowerGridSerializationError _DEPRECATED_FUNCTION_MSG = "This function is deprecated." @@ -190,7 +191,7 @@ def import_json_data(json_file: Path, data_type: str, *args, **kwargs) -> Datase if kwargs: warnings.warn(f"Provided keyword arguments {list(kwargs.keys())} are deprecated.", DeprecationWarning) - return _compatibility_deprecated_import_json_data(json_file=json_file, data_type=data_type) + return _compatibility_deprecated_import_json_data(json_file=json_file, data_type=data_type) # type: ignore def export_json_data( @@ -274,10 +275,13 @@ def import_update_data(json_file: Path) -> BatchDataset: """ warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning) - return cast_type(BatchDataset, _compatibility_deprecated_import_json_data(json_file=json_file, data_type="update")) + return cast_type( + BatchDataset, + _compatibility_deprecated_import_json_data(json_file=json_file, data_type="update"), + ) -def _compatibility_deprecated_import_json_data(json_file: Path, data_type: str): +def _compatibility_deprecated_import_json_data(json_file: Path, data_type: PowerGridDataTypes): with open(json_file, mode="r", encoding="utf-8") as file_pointer: data = json.load(file_pointer) diff --git a/src/power_grid_model/validation/utils.py b/src/power_grid_model/validation/utils.py index 3087aca75..efd7e9c14 100644 --- a/src/power_grid_model/validation/utils.py +++ b/src/power_grid_model/validation/utils.py @@ -12,6 +12,7 @@ from power_grid_model import power_grid_meta_data from power_grid_model.data_types import SingleDataset +from power_grid_model.dataset_definitions import PowerGridDataTypes from power_grid_model.validation.errors import ValidationError @@ -161,7 +162,7 @@ def errors_to_string( return msg -def nan_type(component: str, field: str, data_type="input"): +def nan_type(component: str, field: str, data_type: PowerGridDataTypes = "input"): """ Helper function to retrieve the nan value for a certain field as defined in the power_grid_meta_data. 
""" diff --git a/src/power_grid_model/validation/validation.py b/src/power_grid_model/validation/validation.py index b9a5e1166..b3d58a925 100644 --- a/src/power_grid_model/validation/validation.py +++ b/src/power_grid_model/validation/validation.py @@ -18,6 +18,7 @@ from power_grid_model import power_grid_meta_data from power_grid_model._utils import convert_batch_dataset_to_batch_list from power_grid_model.data_types import BatchDataset, Dataset, SingleDataset +from power_grid_model.dataset_definitions import PowerGridDataTypes from power_grid_model.enum import ( Branch3Side, BranchSide, @@ -151,7 +152,7 @@ def validate_batch_data( return errors if errors else None -def assert_valid_data_structure(data: Dataset, data_type: str) -> None: +def assert_valid_data_structure(data: Dataset, data_type: PowerGridDataTypes) -> None: """ Checks if all component names are valid and if the data inside the component matches the required Numpy structured array as defined in the Power Grid Model meta data. diff --git a/tests/unit/utils.py b/tests/unit/utils.py index 486753393..251ede211 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -13,6 +13,7 @@ from power_grid_model.core.power_grid_model import PowerGridModel from power_grid_model.data_types import Dataset, PythonDataset, SingleDataset +from power_grid_model.dataset_definitions import PowerGridDataTypes from power_grid_model.errors import ( AutomaticTapCalculationError, ConflictID, @@ -284,7 +285,7 @@ def compare_result(actual: SingleDataset, expected: SingleDataset, rtol: float, ) -def convert_python_to_numpy(data: PythonDataset, data_type: str) -> Dataset: +def convert_python_to_numpy(data: PythonDataset, data_type: PowerGridDataTypes) -> Dataset: """ Convert native python data to internal numpy From bddd75b080924bbf74f0cef272f90c58eeab7304 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 18 Jun 2024 14:14:36 +0200 Subject: [PATCH 07/47] Implemented Literal+Enum for the datatypes. Signed-off-by: Santiago Figueroa --- .../power_grid_model/metadata_enums.py.jinja | 43 ++++++++-- src/power_grid_model/dataset_definitions.py | 80 +++++++++++++++++-- 2 files changed, 113 insertions(+), 10 deletions(-) diff --git a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja index 94c91d990..2bbcb93d8 100644 --- a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja @@ -2,20 +2,53 @@ # # SPDX-License-Identifier: MPL-2.0 -# This header file is automatically generated. DO NOT modify it manually! - """Data types for power grid model dataset and component types.""" -from typing import Literal +# This file is automatically generated. DO NOT modify it manually! 
+ +import sys +from typing import Literal, TypeAlias, Union + +# To avoid conflicts with src/power_grid_model/enum.py +# pylint: disable=wrong-import-position + +sys_path = sys.path.pop(0) +from enum import Enum + +# pylint: enable=wrong-import-position + +sys.path.append(sys_path) + +# Value names are defined in lower case instead of upper case +# pylint: disable=invalid-name -PowerGridDataTypes = Literal[ + +class PowerGridDataTypes(Enum): + """Single/batch dataset types.""" + + {% for dataset_type in dataset_types %} + {{ dataset_type }} = "{{ dataset_type }}" + {% endfor %} + +PowerGridDataTypesLiteral = Literal[ {% for dataset_type in dataset_types %} "{{ dataset_type }}", {% endfor %} ] -PowerGridComponents = Literal[ +PowerGridDataType: TypeAlias = Union[PowerGridDataTypes, PowerGridDataTypesLiteral] + +class PowerGridComponents(Enum): + """Grid component types.""" + + {% for component in components %} + {{ component }} = "{{ component }}" + {% endfor %} + +PowerGridComponentsLiteral = Literal[ {% for component in components %} "{{ component }}", {% endfor %} ] + +PowerGridComponent: TypeAlias = Union[PowerGridComponents, PowerGridComponentsLiteral] diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py index 9c2fa879f..15facc670 100644 --- a/src/power_grid_model/dataset_definitions.py +++ b/src/power_grid_model/dataset_definitions.py @@ -2,13 +2,42 @@ # # SPDX-License-Identifier: MPL-2.0 -# This header file is automatically generated. DO NOT modify it manually! - """Data types for power grid model dataset and component types.""" -from typing import Literal +# This file is automatically generated. DO NOT modify it manually! + +import sys +from typing import Literal, TypeAlias, Union + +# To avoid conflicts with src/power_grid_model/enum.py +# pylint: disable=wrong-import-position + +sys_path = sys.path.pop(0) +from enum import Enum + +# pylint: enable=wrong-import-position + +sys.path.append(sys_path) + +# Value names are defined in lower case instead of upper case +# pylint: disable=invalid-name + + +class PowerGridDataTypes(Enum): + """Single/batch dataset types.""" -PowerGridDataTypes = Literal[ + input = "input" + + sym_output = "sym_output" + + asym_output = "asym_output" + + update = "update" + + sc_output = "sc_output" + + +PowerGridDataTypesLiteral = Literal[ "input", "sym_output", "asym_output", @@ -16,7 +45,46 @@ "sc_output", ] -PowerGridComponents = Literal[ +PowerGridDataType: TypeAlias = Union[PowerGridDataTypes, PowerGridDataTypesLiteral] + + +class PowerGridComponents(Enum): + """Grid component types.""" + + node = "node" + + line = "line" + + link = "link" + + transformer = "transformer" + + three_winding_transformer = "three_winding_transformer" + + sym_load = "sym_load" + + sym_gen = "sym_gen" + + asym_load = "asym_load" + + asym_gen = "asym_gen" + + shunt = "shunt" + + source = "source" + + sym_voltage_sensor = "sym_voltage_sensor" + + asym_voltage_sensor = "asym_voltage_sensor" + + sym_power_sensor = "sym_power_sensor" + + asym_power_sensor = "asym_power_sensor" + + fault = "fault" + + +PowerGridComponentsLiteral = Literal[ "node", "line", "link", @@ -34,3 +102,5 @@ "asym_power_sensor", "fault", ] + +PowerGridComponent: TypeAlias = Union[PowerGridComponents, PowerGridComponentsLiteral] From 3d03fe1de16e3585a8e7562e24ef72f241191b5e Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 18 Jun 2024 14:16:16 +0200 Subject: [PATCH 08/47] Re exposed alias datatypes. 
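The Union aliases keep both spellings valid: existing call sites that pass plain strings still type-check, while enum users get autocompletion on the members. A stubbed sketch of the pattern (the generated dataset_definitions module defines the real names; normalize is hypothetical):

    from enum import Enum
    from typing import Literal, Union

    class PowerGridDataTypes(Enum):  # stub of the generated enum
        input = "input"
        update = "update"

    PowerGridDataTypesLiteral = Literal["input", "update"]
    PowerGridDataType = Union[PowerGridDataTypes, PowerGridDataTypesLiteral]

    def normalize(data_type: PowerGridDataType) -> str:
        # Collapse either accepted form to the plain string key.
        return data_type.value if isinstance(data_type, PowerGridDataTypes) else data_type

    assert normalize("update") == normalize(PowerGridDataTypes.update) == "update"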
Signed-off-by: Santiago Figueroa --- src/power_grid_model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/power_grid_model/__init__.py b/src/power_grid_model/__init__.py index 0d516b436..9c4100d23 100644 --- a/src/power_grid_model/__init__.py +++ b/src/power_grid_model/__init__.py @@ -6,7 +6,7 @@ from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data from power_grid_model.core.power_grid_model import PowerGridModel -from power_grid_model.dataset_definitions import PowerGridComponents, PowerGridDataTypes +from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType from power_grid_model.enum import ( Branch3Side, BranchSide, From 7c9284c601bed830e5ca0ecf108fcf046d481c1b Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 18 Jun 2024 14:44:49 +0200 Subject: [PATCH 09/47] Fixed annotations to use the correct PowerGridDataType alias. Signed-off-by: Santiago Figueroa --- src/power_grid_model/core/power_grid_meta.py | 7 +++++-- src/power_grid_model/utils.py | 4 ++-- src/power_grid_model/validation/utils.py | 4 ++-- src/power_grid_model/validation/validation.py | 4 ++-- tests/unit/utils.py | 4 ++-- 5 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/power_grid_model/core/power_grid_meta.py b/src/power_grid_model/core/power_grid_meta.py index 75012388d..6770bb830 100644 --- a/src/power_grid_model/core/power_grid_meta.py +++ b/src/power_grid_model/core/power_grid_meta.py @@ -13,6 +13,7 @@ import numpy as np from power_grid_model.core.power_grid_core import AttributePtr, ComponentPtr, DatasetPtr, power_grid_core as pgc +from power_grid_model.dataset_definitions import PowerGridDataType # constant enum for ctype @@ -61,7 +62,7 @@ def __getitem__(self, item): DatasetMetaData = Dict[str, ComponentMetaData] -PowerGridMetaData = Dict[str, DatasetMetaData] +PowerGridMetaData = Dict[PowerGridDataType, DatasetMetaData] def _generate_meta_data() -> PowerGridMetaData: @@ -156,7 +157,9 @@ def _generate_meta_attributes(component: ComponentPtr) -> dict: power_grid_meta_data = _generate_meta_data() -def initialize_array(data_type: str, component_type: str, shape: Union[tuple, int], empty: bool = False) -> np.ndarray: +def initialize_array( + data_type: PowerGridDataType, component_type: str, shape: Union[tuple, int], empty: bool = False +) -> np.ndarray: """ Initializes an array for use in Power Grid Model calculations diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 20c1b7b4f..192e52796 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -25,7 +25,7 @@ msgpack_serialize, ) from power_grid_model.data_types import BatchArray, BatchDataset, Dataset, SingleDataset -from power_grid_model.dataset_definitions import PowerGridDataTypes +from power_grid_model.dataset_definitions import PowerGridDataType from power_grid_model.errors import PowerGridSerializationError _DEPRECATED_FUNCTION_MSG = "This function is deprecated."
@@ -281,7 +281,7 @@ def import_update_data(json_file: Path) -> BatchDataset: ) -def _compatibility_deprecated_import_json_data(json_file: Path, data_type: PowerGridDataTypes): +def _compatibility_deprecated_import_json_data(json_file: Path, data_type: PowerGridDataType): with open(json_file, mode="r", encoding="utf-8") as file_pointer: data = json.load(file_pointer) diff --git a/src/power_grid_model/validation/utils.py b/src/power_grid_model/validation/utils.py index efd7e9c14..dc52421c7 100644 --- a/src/power_grid_model/validation/utils.py +++ b/src/power_grid_model/validation/utils.py @@ -12,7 +12,7 @@ from power_grid_model import power_grid_meta_data from power_grid_model.data_types import SingleDataset -from power_grid_model.dataset_definitions import PowerGridDataTypes +from power_grid_model.dataset_definitions import PowerGridDataType from power_grid_model.validation.errors import ValidationError @@ -162,7 +162,7 @@ def errors_to_string( return msg -def nan_type(component: str, field: str, data_type: PowerGridDataTypes = "input"): +def nan_type(component: str, field: str, data_type: PowerGridDataType = "input"): """ Helper function to retrieve the nan value for a certain field as defined in the power_grid_meta_data. """ diff --git a/src/power_grid_model/validation/validation.py b/src/power_grid_model/validation/validation.py index b3d58a925..9a3c47f45 100644 --- a/src/power_grid_model/validation/validation.py +++ b/src/power_grid_model/validation/validation.py @@ -18,7 +18,7 @@ from power_grid_model import power_grid_meta_data from power_grid_model._utils import convert_batch_dataset_to_batch_list from power_grid_model.data_types import BatchDataset, Dataset, SingleDataset -from power_grid_model.dataset_definitions import PowerGridDataTypes +from power_grid_model.dataset_definitions import PowerGridDataType from power_grid_model.enum import ( Branch3Side, BranchSide, @@ -152,7 +152,7 @@ def validate_batch_data( return errors if errors else None -def assert_valid_data_structure(data: Dataset, data_type: PowerGridDataTypes) -> None: +def assert_valid_data_structure(data: Dataset, data_type: PowerGridDataType) -> None: """ Checks if all component names are valid and if the data inside the component matches the required Numpy structured array as defined in the Power Grid Model meta data. 
diff --git a/tests/unit/utils.py b/tests/unit/utils.py index 251ede211..b5e9e080a 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -13,7 +13,7 @@ from power_grid_model.core.power_grid_model import PowerGridModel from power_grid_model.data_types import Dataset, PythonDataset, SingleDataset -from power_grid_model.dataset_definitions import PowerGridDataTypes +from power_grid_model.dataset_definitions import PowerGridDataType from power_grid_model.errors import ( AutomaticTapCalculationError, ConflictID, @@ -285,7 +285,7 @@ def compare_result(actual: SingleDataset, expected: SingleDataset, rtol: float, ) -def convert_python_to_numpy(data: PythonDataset, data_type: PowerGridDataTypes) -> Dataset: +def convert_python_to_numpy(data: PythonDataset, data_type: PowerGridDataType) -> Dataset: """ Convert native python data to internal numpy From 4a0cdabdc1a73505e4613f6a4bae27379096f739 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 18 Jun 2024 16:05:43 +0200 Subject: [PATCH 10/47] dataset_types -> PowerGridDataType Signed-off-by: Santiago Figueroa --- src/power_grid_model/core/power_grid_dataset.py | 9 +++++---- src/power_grid_model/core/serialization.py | 11 ++++++----- src/power_grid_model/utils.py | 4 ++-- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py index edd413604..ca05cf80c 100644 --- a/src/power_grid_model/core/power_grid_dataset.py +++ b/src/power_grid_model/core/power_grid_dataset.py @@ -26,6 +26,7 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_meta import DatasetMetaData, power_grid_meta_data +from power_grid_model.dataset_definitions import PowerGridDataType from power_grid_model.errors import PowerGridError @@ -119,7 +120,7 @@ def total_elements(self) -> Mapping[str, int]: } -def get_dataset_type(data: Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]) -> str: +def get_dataset_type(data: Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]) -> PowerGridDataType: """ Deduce the dataset type from the provided dataset. @@ -174,7 +175,7 @@ class CMutableDataset: The dataset will create mutable buffers that the Power Grid Model can use to load data. 
""" - _dataset_type: str + _dataset_type: PowerGridDataType _schema: DatasetMetaData _is_batch: bool _batch_size: int @@ -184,7 +185,7 @@ class CMutableDataset: def __new__( cls, data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], - dataset_type: Optional[str] = None, + dataset_type: Optional[PowerGridDataType] = None, ): instance = super().__new__(cls) instance._mutable_dataset = MutableDatasetPtr() @@ -322,7 +323,7 @@ class CConstDataset: def __new__( cls, data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], - dataset_type: Optional[str] = None, + dataset_type: Optional[PowerGridDataType] = None, ): instance = super().__new__(cls) instance._const_dataset = ConstDatasetPtr() diff --git a/src/power_grid_model/core/serialization.py b/src/power_grid_model/core/serialization.py index 3efb7a1d8..bc5c50b0a 100644 --- a/src/power_grid_model/core/serialization.py +++ b/src/power_grid_model/core/serialization.py @@ -23,6 +23,7 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_dataset import CConstDataset, CWritableDataset +from power_grid_model.dataset_definitions import PowerGridDataType from power_grid_model.errors import PowerGridSerializationError @@ -91,7 +92,7 @@ def __new__( cls, data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], serialization_type: SerializationType, - dataset_type: Optional[str] = None, + dataset_type: Optional[PowerGridDataType] = None, ): instance = super().__new__(cls) @@ -198,7 +199,7 @@ class JsonSerializer(_StringSerializer): # pylint: disable=too-few-public-metho def __new__( cls, data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], - dataset_type: Optional[str] = None, + dataset_type: Optional[PowerGridDataType] = None, ): return super().__new__(cls, data, SerializationType.JSON, dataset_type=dataset_type) @@ -211,7 +212,7 @@ class MsgpackSerializer(_BytesSerializer): # pylint: disable=too-few-public-met def __new__( cls, data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], - dataset_type: Optional[str] = None, + dataset_type: Optional[PowerGridDataType] = None, ): return super().__new__(cls, data, SerializationType.MSGPACK, dataset_type=dataset_type) @@ -237,7 +238,7 @@ def json_deserialize(data: Union[str, bytes]) -> Dict[str, Union[np.ndarray, Dic def json_serialize( data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], - dataset_type: Optional[str] = None, + dataset_type: Optional[PowerGridDataType] = None, use_compact_list: bool = False, indent: int = 2, ) -> str: @@ -287,7 +288,7 @@ def msgpack_deserialize(data: bytes) -> Dict[str, Union[np.ndarray, Dict[str, np def msgpack_serialize( data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], - dataset_type: Optional[str] = None, + dataset_type: Optional[PowerGridDataType] = None, use_compact_list: bool = False, ) -> bytes: """ diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 192e52796..6aa75eb5b 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -108,7 +108,7 @@ def json_deserialize_from_file(file_path: Path) -> Dataset: def json_serialize_to_file( file_path: Path, data: Dataset, - dataset_type: Optional[str] = None, + dataset_type: Optional[PowerGridDataType] = None, use_compact_list: bool = False, indent: Optional[int] = 
2, ): @@ -151,7 +151,7 @@ def msgpack_deserialize_from_file(file_path: Path) -> Dataset: def msgpack_serialize_to_file( - file_path: Path, data: Dataset, dataset_type: Optional[str] = None, use_compact_list: bool = False + file_path: Path, data: Dataset, dataset_type: Optional[PowerGridDataType] = None, use_compact_list: bool = False ): """ Export msgpack data in most recent format. From 5c4b69efe35ee4c32bb8325a3e39915cc7f7a784 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 19 Jun 2024 00:01:31 +0200 Subject: [PATCH 11/47] Updated component_type: PowerGridComponent Signed-off-by: Santiago Figueroa --- src/power_grid_model/core/buffer_handling.py | 5 +- src/power_grid_model/core/data_handling.py | 19 ++++--- .../core/power_grid_dataset.py | 45 ++++++++++----- src/power_grid_model/core/power_grid_meta.py | 6 +- src/power_grid_model/core/power_grid_model.py | 57 ++++++++++--------- 5 files changed, 78 insertions(+), 54 deletions(-) diff --git a/src/power_grid_model/core/buffer_handling.py b/src/power_grid_model/core/buffer_handling.py index 4d4e690cf..9fa4370c6 100644 --- a/src/power_grid_model/core/buffer_handling.py +++ b/src/power_grid_model/core/buffer_handling.py @@ -16,6 +16,7 @@ from power_grid_model.core.index_integer import IdxC, IdxNp from power_grid_model.core.power_grid_core import IdxPtr, VoidPtr from power_grid_model.core.power_grid_meta import ComponentMetaData +from power_grid_model.dataset_definitions import PowerGridComponent @dataclass @@ -230,7 +231,9 @@ def get_buffer_view(data: Union[np.ndarray, Mapping[str, np.ndarray]], schema: C return _get_sparse_buffer_view(data, schema) -def create_buffer(properties: BufferProperties, schema: ComponentMetaData) -> Union[np.ndarray, Dict[str, np.ndarray]]: +def create_buffer( + properties: BufferProperties, schema: ComponentMetaData +) -> Union[np.ndarray, Dict[PowerGridComponent, np.ndarray]]: """ Create a buffer with the provided properties and type. diff --git a/src/power_grid_model/core/data_handling.py b/src/power_grid_model/core/data_handling.py index 833ee36b1..00bbd446d 100644 --- a/src/power_grid_model/core/data_handling.py +++ b/src/power_grid_model/core/data_handling.py @@ -14,6 +14,7 @@ from power_grid_model.core.power_grid_dataset import CConstDataset, CMutableDataset from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data +from power_grid_model.dataset_definitions import PowerGridComponent from power_grid_model.enum import CalculationType @@ -52,7 +53,7 @@ def get_output_type(*, calculation_type: CalculationType, symmetric: bool) -> Ou raise NotImplementedError() -def prepare_input_view(input_data: Mapping[str, np.ndarray]) -> CConstDataset: +def prepare_input_view(input_data: Mapping[PowerGridComponent, np.ndarray]) -> CConstDataset: """ Create a view of the input data in a format compatible with the PGM core libary. @@ -66,7 +67,9 @@ def prepare_input_view(input_data: Mapping[str, np.ndarray]) -> CConstDataset: return CConstDataset(input_data, dataset_type="input") -def prepare_update_view(update_data: Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]) -> CConstDataset: +def prepare_update_view( + update_data: Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]] +) -> CConstDataset: """ Create a view of the update data, or an empty view if not provided, in a format compatible with the PGM core libary. 
@@ -80,7 +83,9 @@ def prepare_update_view(update_data: Mapping[str, Union[np.ndarray, Mapping[str, return CConstDataset(update_data, dataset_type="update") -def prepare_output_view(output_data: Mapping[str, np.ndarray], output_type: OutputType) -> CMutableDataset: +def prepare_output_view( + output_data: Mapping[PowerGridComponent, np.ndarray], output_type: OutputType +) -> CMutableDataset: """ create a view of the output data in a format compatible with the PGM core libary. @@ -97,12 +102,12 @@ def prepare_output_view(output_data: Mapping[str, np.ndarray], output_type: Outp def create_output_data( - output_component_types: Union[Set[str], List[str]], + output_component_types: Union[Set[PowerGridComponent], List[PowerGridComponent]], output_type: OutputType, - all_component_count: Dict[str, int], + all_component_count: Dict[PowerGridComponent, int], is_batch: bool, batch_size: int, -) -> Dict[str, np.ndarray]: +) -> Dict[PowerGridComponent, np.ndarray]: """ Create the output data that the user can use. always returns batch type output data. Use reduce_output_data to flatten to single scenario output if applicable. @@ -147,6 +152,6 @@ def create_output_data( shape: Union[Tuple[int], Tuple[int, int]] = (batch_size, count) else: shape = (count,) - result_dict[name] = initialize_array(output_type.value, name, shape=shape, empty=True) + result_dict[name] = initialize_array(output_type.value, name, shape=shape, empty=True) # return result_dict diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py index ca05cf80c..b3268b28a 100644 --- a/src/power_grid_model/core/power_grid_dataset.py +++ b/src/power_grid_model/core/power_grid_dataset.py @@ -26,7 +26,7 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_meta import DatasetMetaData, power_grid_meta_data -from power_grid_model.dataset_definitions import PowerGridDataType +from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType from power_grid_model.errors import PowerGridError @@ -83,7 +83,7 @@ def n_components(self) -> int: """ return pgc.dataset_info_n_components(self._info) - def components(self) -> List[str]: + def components(self) -> List[PowerGridComponent]: """ The components in the dataset. @@ -92,7 +92,7 @@ def components(self) -> List[str]: """ return [pgc.dataset_info_component_name(self._info, idx) for idx in range(self.n_components())] - def elements_per_scenario(self) -> Mapping[str, int]: + def elements_per_scenario(self) -> Mapping[PowerGridComponent, int]: """ The number of elements per scenario in the dataset. @@ -105,7 +105,7 @@ def elements_per_scenario(self) -> Mapping[str, int]: for idx, component_name in enumerate(self.components()) } - def total_elements(self) -> Mapping[str, int]: + def total_elements(self) -> Mapping[PowerGridComponent, int]: """ The total number of elements in the dataset. @@ -120,7 +120,9 @@ def total_elements(self) -> Mapping[str, int]: } -def get_dataset_type(data: Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]) -> PowerGridDataType: +def get_dataset_type( + data: Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]] +) -> PowerGridDataType: """ Deduce the dataset type from the provided dataset. 
@@ -184,7 +186,10 @@ class CMutableDataset: def __new__( cls, - data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], + data: Union[ + Mapping[PowerGridComponent, np.ndarray], + Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + ], dataset_type: Optional[PowerGridDataType] = None, ): instance = super().__new__(cls) @@ -240,7 +245,11 @@ def get_buffer_views(self) -> List[CBuffer]: return self._buffer_views def _add_data( - self, data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]] + self, + data: Union[ + Mapping[PowerGridComponent, np.ndarray], + Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + ], ): """ Add Power Grid Model data to the mutable dataset view. @@ -257,7 +266,10 @@ def _add_data( self._add_component_data(component, component_data, allow_unknown=False) def _add_component_data( - self, component: str, data: Union[np.ndarray, Mapping[str, np.ndarray]], allow_unknown: bool = False + self, + component: PowerGridComponent, + data: Union[np.ndarray, Mapping[str, np.ndarray]], + allow_unknown: bool = False, ): """ Add Power Grid Model data for a single component to the mutable dataset view. @@ -322,7 +334,10 @@ class CConstDataset: def __new__( cls, - data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], + data: Union[ + Mapping[PowerGridComponent, np.ndarray], + Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + ], dataset_type: Optional[PowerGridDataType] = None, ): instance = super().__new__(cls) @@ -376,7 +391,7 @@ def __init__(self, dataset_ptr: WritableDatasetPtr): self._schema = power_grid_meta_data[self._dataset_type] self._component_buffer_properties = self._get_buffer_properties(info) - self._data: Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]] = {} + self._data: Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]] = {} self._buffers: Mapping[str, CBuffer] = {} self._add_buffers() @@ -400,7 +415,7 @@ def get_info(self) -> CDatasetInfo: """ return CDatasetInfo(pgc.dataset_writable_get_info(self._writable_dataset)) - def get_data(self) -> Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]: + def get_data(self) -> Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]: """ Retrieve data from the Power Grid Model dataset. @@ -411,7 +426,7 @@ def get_data(self) -> Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]: """ return self._data - def get_component_data(self, component: str) -> Union[np.ndarray, Mapping[str, np.ndarray]]: + def get_component_data(self, component: PowerGridComponent) -> Union[np.ndarray, Mapping[str, np.ndarray]]: """ Retrieve Power Grid Model data from the dataset for a specific component. 
@@ -427,20 +442,20 @@ def _add_buffers(self): for component, buffer_properties in self._component_buffer_properties.items(): self._add_buffer(component, buffer_properties) - def _add_buffer(self, component: str, buffer_properties: BufferProperties): + def _add_buffer(self, component: PowerGridComponent, buffer_properties: BufferProperties): schema = self._schema[component] self._data[component] = create_buffer(buffer_properties, schema) self._register_buffer(component, get_buffer_view(self._data[component], schema)) - def _register_buffer(self, component: str, buffer: CBuffer): + def _register_buffer(self, component: PowerGridComponent, buffer: CBuffer): pgc.dataset_writable_set_buffer( dataset=self._writable_dataset, component=component, indptr=buffer.indptr, data=buffer.data ) assert_no_error() @staticmethod - def _get_buffer_properties(info: CDatasetInfo) -> Mapping[str, BufferProperties]: + def _get_buffer_properties(info: CDatasetInfo) -> Mapping[PowerGridComponent, BufferProperties]: is_batch = info.is_batch() batch_size = info.batch_size() components = info.components() diff --git a/src/power_grid_model/core/power_grid_meta.py b/src/power_grid_model/core/power_grid_meta.py index 6770bb830..0f45d93d9 100644 --- a/src/power_grid_model/core/power_grid_meta.py +++ b/src/power_grid_model/core/power_grid_meta.py @@ -13,7 +13,7 @@ import numpy as np from power_grid_model.core.power_grid_core import AttributePtr, ComponentPtr, DatasetPtr, power_grid_core as pgc -from power_grid_model.dataset_definitions import PowerGridDataType +from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType # constant enum for ctype @@ -61,7 +61,7 @@ def __getitem__(self, item): return getattr(self, item) -DatasetMetaData = Dict[str, ComponentMetaData] +DatasetMetaData = Dict[PowerGridComponent, ComponentMetaData] PowerGridMetaData = Dict[PowerGridDataType, DatasetMetaData] @@ -158,7 +158,7 @@ def _generate_meta_attributes(component: ComponentPtr) -> dict: def initialize_array( - data_type: PowerGridDataType, component_type: str, shape: Union[tuple, int], empty: bool = False + data_type: PowerGridDataType, component_type: PowerGridComponent, shape: Union[tuple, int], empty: bool = False ) -> np.ndarray: """ Initializes an array for use in Power Grid Model calculations diff --git a/src/power_grid_model/core/power_grid_model.py b/src/power_grid_model/core/power_grid_model.py index e7c1ecda2..3d07586b0 100644 --- a/src/power_grid_model/core/power_grid_model.py +++ b/src/power_grid_model/core/power_grid_model.py @@ -21,6 +21,7 @@ from power_grid_model.core.index_integer import IdNp, IdxNp from power_grid_model.core.options import Options from power_grid_model.core.power_grid_core import ConstDatasetPtr, IDPtr, IdxPtr, ModelPtr, power_grid_core as pgc +from power_grid_model.dataset_definitions import PowerGridComponent from power_grid_model.enum import ( CalculationMethod, CalculationType, @@ -36,7 +37,7 @@ class PowerGridModel: """ _model_ptr: ModelPtr - _all_component_count: Optional[Dict[str, int]] + _all_component_count: Optional[Dict[PowerGridComponent, int]] _batch_error: Optional[PowerGridBatchError] @property @@ -56,7 +57,7 @@ def _model(self): return self._model_ptr @property - def all_component_count(self) -> Dict[str, int]: + def all_component_count(self) -> Dict[PowerGridComponent, int]: """ Get count of number of elements per component type. If the count for a component type is zero, it will not be in the returned dictionary. 
@@ -93,7 +94,7 @@ def __new__(cls, *_args, **_kwargs): instance._all_component_count = None return instance - def __init__(self, input_data: Dict[str, np.ndarray], system_frequency: float = 50.0): + def __init__(self, input_data: Dict[PowerGridComponent, np.ndarray], system_frequency: float = 50.0): """ Initialize the model from an input data set. @@ -114,7 +115,7 @@ def __init__(self, input_data: Dict[str, np.ndarray], system_frequency: float = assert_no_error() self._all_component_count = {k: v for k, v in prepared_input.get_info().total_elements().items() if v > 0} - def update(self, *, update_data: Dict[str, np.ndarray]): + def update(self, *, update_data: Dict[PowerGridComponent, np.ndarray]): """ Update the model with changes. @@ -131,7 +132,7 @@ def update(self, *, update_data: Dict[str, np.ndarray]): pgc.update_model(self._model, prepared_update.get_dataset_ptr()) assert_no_error() - def get_indexer(self, component_type: str, ids: np.ndarray): + def get_indexer(self, component_type: PowerGridComponent, ids: np.ndarray): """ Get array of indexers given array of ids for component type @@ -158,9 +159,9 @@ def _get_output_component_count(self, calculation_type: CalculationType): CalculationType.short_circuit: ["sensor"], }.get(calculation_type, []) - def include_type(component_type: str): + def include_type(component_type: PowerGridComponent): for exclude_type in exclude_types: - if exclude_type in component_type: + if exclude_type == component_type: return False return True @@ -169,12 +170,12 @@ def include_type(component_type: str): # pylint: disable=too-many-arguments def _construct_output( self, - output_component_types: Optional[Union[Set[str], List[str]]], + output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]], calculation_type: CalculationType, symmetric: bool, is_batch: bool, batch_size: int, - ) -> Dict[str, np.ndarray]: + ) -> Dict[PowerGridComponent, np.ndarray]: all_component_count = self._get_output_component_count(calculation_type=calculation_type) # limit all component count to user specified component types in output @@ -217,8 +218,8 @@ def _calculate_impl( self, calculation_type: CalculationType, symmetric: bool, - update_data: Optional[Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]], - output_component_types: Optional[Union[Set[str], List[str]]], + update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]], + output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]], options: Options, continue_on_batch_error: bool, decode_error: bool, @@ -282,9 +283,9 @@ def _calculate_power_flow( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.newton_raphson, - update_data: Optional[Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[str], List[str]]] = None, + output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, tap_changing_strategy: Union[TapChangingStrategy, str] = TapChangingStrategy.disabled, @@ -318,13 +319,13 @@ def _calculate_state_estimation( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iterative_linear, - update_data: 
Optional[Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[str], List[str]]] = None, + output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, experimental_features: Union[_ExperimentalFeatures, str] = _ExperimentalFeatures.disabled, - ) -> Dict[str, np.ndarray]: + ) -> Dict[PowerGridComponent, np.ndarray]: calculation_type = CalculationType.state_estimation options = self._options( calculation_type=calculation_type, @@ -349,14 +350,14 @@ def _calculate_short_circuit( self, *, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iec60909, - update_data: Optional[Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[str], List[str]]] = None, + output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, short_circuit_voltage_scaling: Union[ShortCircuitVoltageScaling, str] = ShortCircuitVoltageScaling.maximum, experimental_features: Union[_ExperimentalFeatures, str] = _ExperimentalFeatures.disabled, - ) -> Dict[str, np.ndarray]: + ) -> Dict[PowerGridComponent, np.ndarray]: calculation_type = CalculationType.short_circuit symmetric = False @@ -385,13 +386,13 @@ def calculate_power_flow( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.newton_raphson, - update_data: Optional[Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[str], List[str]]] = None, + output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, tap_changing_strategy: Union[TapChangingStrategy, str] = TapChangingStrategy.disabled, - ) -> Dict[str, np.ndarray]: + ) -> Dict[PowerGridComponent, np.ndarray]: """ Calculate power flow once with the current model attributes. Or calculate in batch with the given update dataset in batch. @@ -474,12 +475,12 @@ def calculate_state_estimation( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iterative_linear, - update_data: Optional[Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[str], List[str]]] = None, + output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, - ) -> Dict[str, np.ndarray]: + ) -> Dict[PowerGridComponent, np.ndarray]: """ Calculate state estimation once with the current model attributes. Or calculate in batch with the given update dataset in batch. 
@@ -555,13 +556,13 @@ def calculate_short_circuit( self, *, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iec60909, - update_data: Optional[Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[str], List[str]]] = None, + output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, short_circuit_voltage_scaling: Union[ShortCircuitVoltageScaling, str] = ShortCircuitVoltageScaling.maximum, - ) -> Dict[str, np.ndarray]: + ) -> Dict[PowerGridComponent, np.ndarray]: """ Calculate a short circuit once with the current model attributes. Or calculate in batch with the given update dataset in batch From b1e034522fcd2a74e75676c950dbb73775c66d41 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 19 Jun 2024 10:31:48 +0200 Subject: [PATCH 12/47] Added unit test Signed-off-by: Santiago Figueroa --- tests/unit/test_dataset_definitions.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 tests/unit/test_dataset_definitions.py diff --git a/tests/unit/test_dataset_definitions.py b/tests/unit/test_dataset_definitions.py new file mode 100644 index 000000000..9e2a2207e --- /dev/null +++ b/tests/unit/test_dataset_definitions.py @@ -0,0 +1,24 @@ +# SPDX-FileCopyrightText: Contributors to the Power Grid Model project +# +# SPDX-License-Identifier: MPL-2.0 + +import pytest + +from power_grid_model import power_grid_meta_data +from power_grid_model.dataset_definitions import PowerGridComponents, PowerGridDataTypes + + +def test_power_grid_data_types(): + power_grid_data_types = [data_type for data_type in power_grid_meta_data] + gen_power_grid_data_types = [member.value for member in PowerGridDataTypes] + power_grid_data_types.sort() + gen_power_grid_data_types.sort() + assert power_grid_data_types == gen_power_grid_data_types + + +def test_power_grid_components(): + power_grid_components = [component for component in power_grid_meta_data["input"]] + gen_power_grid_components = [member.value for member in PowerGridComponents] + power_grid_components.sort() + gen_power_grid_components.sort() + assert power_grid_components == gen_power_grid_components From a2a75fe0fd78e3d8f16d438128ade318609981cb Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 19 Jun 2024 16:28:41 +0200 Subject: [PATCH 13/47] Added docs Signed-off-by: Santiago Figueroa --- .../power_grid_model/metadata_enums.py.jinja | 37 +++++++++++++++---- docs/api_reference/python-api-reference.md | 2 + src/power_grid_model/dataset_definitions.py | 22 ++++++++++- 3 files changed, 53 insertions(+), 8 deletions(-) diff --git a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja index 2bbcb93d8..1aace149b 100644 --- a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja @@ -24,19 +24,30 @@ sys.path.append(sys_path) class PowerGridDataTypes(Enum): - """Single/batch dataset types.""" + """Dataset types.""" {% for dataset_type in dataset_types %} {{ dataset_type }} = "{{ dataset_type }}" {% endfor %} + PowerGridDataTypesLiteral = Literal[ -{% for dataset_type in dataset_types %} - "{{ dataset_type }}", -{% endfor %} + {% for dataset_type 
in dataset_types %}
+    "{{ dataset_type }}",
+    {% endfor %}
 ]
+
 PowerGridDataType: TypeAlias = Union[PowerGridDataTypes, PowerGridDataTypesLiteral]
+"""
+A PowerGridDataType is the type of a :class:`BatchDataset`.
+
+- Examples:
+
+    - PowerGridDataType.input = "input"
+    - PowerGridDataType.update = "update"
+"""
+

 class PowerGridComponents(Enum):
     """Grid component types."""
@@ -45,10 +56,22 @@ class PowerGridComponents(Enum):
     {{ component }} = "{{ component }}"
 {% endfor %}

+
 PowerGridComponentsLiteral = Literal[
-{% for component in components %}
-    "{{ component }}",
-{% endfor %}
+    {% for component in components %}
+    "{{ component }}",
+    {% endfor %}
 ]

+
 PowerGridComponent: TypeAlias = Union[PowerGridComponents, PowerGridComponentsLiteral]
+"""
+A PowerGridComponent is the type of a grid component.
+
+- Examples:
+
+    - PowerGridComponent.node = "node"
+    - PowerGridComponent.line = "line"
+"""
+
+# pylint: enable=invalid-name
diff --git a/docs/api_reference/python-api-reference.md b/docs/api_reference/python-api-reference.md
index 292213b8f..94508224c 100644
--- a/docs/api_reference/python-api-reference.md
+++ b/docs/api_reference/python-api-reference.md
@@ -32,6 +32,8 @@ SPDX-License-Identifier: MPL-2.0
 .. autoclass:: power_grid_model.data_types.BatchArray
 .. autoclass:: power_grid_model.data_types.DenseBatchArray
 .. autoclass:: power_grid_model.data_types.SparseBatchArray
+.. autoclass:: power_grid_model.dataset_definitions.PowerGridDataType
+.. autoclass:: power_grid_model.dataset_definitions.PowerGridComponent
 ```
diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py
index 15facc670..2c0c6d948 100644
--- a/src/power_grid_model/dataset_definitions.py
+++ b/src/power_grid_model/dataset_definitions.py
@@ -24,7 +24,7 @@ class PowerGridDataTypes(Enum):
-    """Single/batch dataset types."""
+    """Dataset types."""

     input = "input"

@@ -45,7 +45,16 @@ class PowerGridDataTypes(Enum):
     "sc_output",
 ]
+
 PowerGridDataType: TypeAlias = Union[PowerGridDataTypes, PowerGridDataTypesLiteral]
+"""
+A PowerGridDataType is the type of a :class:`BatchDataset`.
+ +- Examples: + + - PowerGridComponent.node = "node" + - PowerGridComponent.line = "line" +""" + +# pylint: enable=invalid-name From 325fbe754b491e0b013b4f966df2315d795002ea Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 19 Jun 2024 16:45:21 +0200 Subject: [PATCH 14/47] Gen_code after bug fix Signed-off-by: Santiago Figueroa --- src/power_grid_model/dataset_definitions.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py index 2c0c6d948..2a9480c37 100644 --- a/src/power_grid_model/dataset_definitions.py +++ b/src/power_grid_model/dataset_definitions.py @@ -68,6 +68,8 @@ class PowerGridComponents(Enum): transformer = "transformer" + transformer_tap_regulator = "transformer_tap_regulator" + three_winding_transformer = "three_winding_transformer" sym_load = "sym_load" @@ -98,6 +100,7 @@ class PowerGridComponents(Enum): "line", "link", "transformer", + "transformer_tap_regulator", "three_winding_transformer", "sym_load", "sym_gen", From 8aaa9a29613024b1e062cea1bbc6b0bd6d0d14cf Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 19 Jun 2024 17:02:59 +0200 Subject: [PATCH 15/47] Removed TypeAlias until python 3.9 is dropped. Signed-off-by: Santiago Figueroa --- .../templates/src/power_grid_model/metadata_enums.py.jinja | 6 +++--- src/power_grid_model/dataset_definitions.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja index 1aace149b..3a06ee927 100644 --- a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja @@ -7,7 +7,7 @@ # This file is automatically generated. DO NOT modify it manually! import sys -from typing import Literal, TypeAlias, Union +from typing import Literal, Union # To avoid conflicts with src/power_grid_model/enum.py # pylint: disable=wrong-import-position @@ -38,7 +38,7 @@ PowerGridDataTypesLiteral = Literal[ ] -PowerGridDataType: TypeAlias = Union[PowerGridDataTypes, PowerGridDataTypesLiteral] +PowerGridDataType = Union[PowerGridDataTypes, PowerGridDataTypesLiteral] """ A PowerGridDataType is the type of a :class:`BatchDataset`. @@ -64,7 +64,7 @@ PowerGridComponentsLiteral = Literal[ ] -PowerGridComponent: TypeAlias = Union[PowerGridComponents, PowerGridComponentsLiteral] +PowerGridComponent = Union[PowerGridComponents, PowerGridComponentsLiteral] """ A PowerGridComponent is the type of a grid component. diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py index 2a9480c37..f9f2687ff 100644 --- a/src/power_grid_model/dataset_definitions.py +++ b/src/power_grid_model/dataset_definitions.py @@ -7,7 +7,7 @@ # This file is automatically generated. DO NOT modify it manually! import sys -from typing import Literal, TypeAlias, Union +from typing import Literal, Union # To avoid conflicts with src/power_grid_model/enum.py # pylint: disable=wrong-import-position @@ -46,7 +46,7 @@ class PowerGridDataTypes(Enum): ] -PowerGridDataType: TypeAlias = Union[PowerGridDataTypes, PowerGridDataTypesLiteral] +PowerGridDataType = Union[PowerGridDataTypes, PowerGridDataTypesLiteral] """ A PowerGridDataType is the type of a :class:`BatchDataset`. 
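The generated pattern, an Enum plus a Literal spelling out the same strings joined in a plain Union, can be sketched in isolation as below. Fruits is a hypothetical stand-in for the generated enums, and the bare assignment (instead of an annotated TypeAlias) is what keeps the module importable on Python 3.9:

from enum import Enum
from typing import Literal, Union


class Fruits(Enum):
    """Hypothetical stand-in for a generated metadata enum."""

    apple = "apple"
    pear = "pear"


FruitsLiteral = Literal["apple", "pear"]
Fruit = Union[Fruits, FruitsLiteral]  # plain assignment: no typing.TypeAlias on 3.9


def normalize(fruit: Fruit) -> str:
    # Accept either the enum member or its literal string value.
    return fruit.value if isinstance(fruit, Fruits) else fruit


assert normalize(Fruits.apple) == normalize("apple") == "apple"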
@@ -116,7 +116,7 @@ class PowerGridComponents(Enum): ] -PowerGridComponent: TypeAlias = Union[PowerGridComponents, PowerGridComponentsLiteral] +PowerGridComponent = Union[PowerGridComponents, PowerGridComponentsLiteral] """ A PowerGridComponent is the type of a grid component. From c16ddd5f115e5745281f8b32a25a87d620ede0f3 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 19 Jun 2024 17:32:54 +0200 Subject: [PATCH 16/47] Fixed missing PowerGridComponent types update Signed-off-by: Santiago Figueroa --- src/power_grid_model/core/serialization.py | 38 ++++++++++++++++------ src/power_grid_model/data_types.py | 36 +++----------------- 2 files changed, 32 insertions(+), 42 deletions(-) diff --git a/src/power_grid_model/core/serialization.py b/src/power_grid_model/core/serialization.py index bc5c50b0a..d2adc63eb 100644 --- a/src/power_grid_model/core/serialization.py +++ b/src/power_grid_model/core/serialization.py @@ -23,7 +23,7 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_dataset import CConstDataset, CWritableDataset -from power_grid_model.dataset_definitions import PowerGridDataType +from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType from power_grid_model.errors import PowerGridSerializationError @@ -64,7 +64,7 @@ def __del__(self): if hasattr(self, "_deserializer"): pgc.destroy_deserializer(self._deserializer) - def load(self) -> Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]: + def load(self) -> Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]: """ Load the deserialized data to a new dataset. @@ -84,13 +84,19 @@ class Serializer(ABC): Serializer for the Power grid model """ - _data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]] + _data: Union[ + Mapping[PowerGridComponent, np.ndarray], + Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + ] _dataset: CConstDataset _serializer: SerializerPtr def __new__( cls, - data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], + data: Union[ + Mapping[PowerGridComponent, np.ndarray], + Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + ], serialization_type: SerializationType, dataset_type: Optional[PowerGridDataType] = None, ): @@ -198,7 +204,10 @@ class JsonSerializer(_StringSerializer): # pylint: disable=too-few-public-metho def __new__( cls, - data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], + data: Union[ + Mapping[PowerGridComponent, np.ndarray], + Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + ], dataset_type: Optional[PowerGridDataType] = None, ): return super().__new__(cls, data, SerializationType.JSON, dataset_type=dataset_type) @@ -211,13 +220,16 @@ class MsgpackSerializer(_BytesSerializer): # pylint: disable=too-few-public-met def __new__( cls, - data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], + data: Union[ + Mapping[PowerGridComponent, np.ndarray], + Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + ], dataset_type: Optional[PowerGridDataType] = None, ): return super().__new__(cls, data, SerializationType.MSGPACK, dataset_type=dataset_type) -def json_deserialize(data: Union[str, bytes]) -> Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]: +def json_deserialize(data: Union[str, bytes]) -> Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]: """ 
Load serialized JSON data to a new dataset. @@ -237,7 +249,10 @@ def json_deserialize(data: Union[str, bytes]) -> Dict[str, Union[np.ndarray, Dic def json_serialize( - data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], + data: Union[ + Mapping[PowerGridComponent, np.ndarray], + Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + ], dataset_type: Optional[PowerGridDataType] = None, use_compact_list: bool = False, indent: int = 2, @@ -267,7 +282,7 @@ def json_serialize( return result -def msgpack_deserialize(data: bytes) -> Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]]: +def msgpack_deserialize(data: bytes) -> Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]: """ Load serialized msgpack data to a new dataset. @@ -287,7 +302,10 @@ def msgpack_deserialize(data: bytes) -> Dict[str, Union[np.ndarray, Dict[str, np def msgpack_serialize( - data: Union[Mapping[str, np.ndarray], Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]]], + data: Union[ + Mapping[PowerGridComponent, np.ndarray], + Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + ], dataset_type: Optional[PowerGridDataType] = None, use_compact_list: bool = False, ) -> bytes: diff --git a/src/power_grid_model/data_types.py b/src/power_grid_model/data_types.py index d946a67d6..69b1cdadd 100644 --- a/src/power_grid_model/data_types.py +++ b/src/power_grid_model/data_types.py @@ -7,12 +7,11 @@ have been defined and explained in this file. """ -from enum import Enum from typing import Dict, List, Tuple, Union import numpy as np -from power_grid_model import power_grid_meta_data +from power_grid_model.dataset_definitions import PowerGridComponent # When we're dropping python 3.8, we should introduce proper NumPy type hinting @@ -63,7 +62,7 @@ A data array can be a :class:`SingleArray` or a :class:`BatchArray`. """ -SingleDataset = Dict[str, SingleArray] +SingleDataset = Dict[PowerGridComponent, SingleArray] """ A single dataset is a dictionary where the keys are the component types and the values are :class:`SingleArray` @@ -71,7 +70,7 @@ - Example: {"node": :class:`SingleArray`, "line": :class:`SingleArray`} """ -BatchDataset = Dict[str, BatchArray] +BatchDataset = Dict[PowerGridComponent, BatchArray] """ A batch dataset is a dictionary where the keys are the component types and the values are :class:`BatchArray` @@ -145,7 +144,7 @@ - Example: [{"id": 1, "u_rated": 10500.0}, {"id": 2, "u_rated": 10500.0}] """ -SinglePythonDataset = Dict[str, ComponentList] +SinglePythonDataset = Dict[PowerGridComponent, ComponentList] """ A single dataset in native python representation is a dictionary, where the keys are the component names and the values are a list of all the instances of such a component. In essence it stores the same information as a @@ -189,30 +188,3 @@ [{"line": [{"id": 3, "from_status": 0, "to_status": 0, ...}],}, {"line": [{"id": 3, "from_status": 1, "to_status": 1, ...}],}] """ - -# comply with mypy -PowerGridDataTypes = Enum( # type: ignore - "PowerGridDataTypes", {data_type: data_type for data_type in power_grid_meta_data} # type: ignore -) # type: ignore -""" -Types of single/batch datasets: - - - input: Dataset with attributes relevant to the grid configuration (e.g. id, from_node, from_status). - - - update: Dataset with attributes relevant to multiple scenarios (e.g. from_status, to_status). 
- - - sym_output: Dataset with attributes relevant to symmetrical steady state output of power flow or state estimation - calculation (e.g. p_from, p_to). - - - asym_output: Dataset with attributes relevant to asymmetrical steady state output of power flow or state estimation - calculation (e.g. p_from, p_to). - - - sc_output: Contains attributes relevant to symmetrical short circuit calculation output. Like for the asym_output, - detailed data for all 3 phases will be provided where relevant (e.g. i_from, i_from_angle). -""" - -# to comply with mypy -PowerGridComponents = Enum( # type: ignore - "PowerGridComponents", {component: component for component in power_grid_meta_data["input"]} # type: ignore -) # type: ignore -"""Grid component types.""" From 6f2237c23a502ae25c68cafeaf51fee3fe121b10 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 19 Jun 2024 23:50:49 +0200 Subject: [PATCH 17/47] Updated most missing component types. Signed-off-by: Santiago Figueroa --- src/power_grid_model/_utils.py | 7 +- src/power_grid_model/validation/errors.py | 42 +++++---- src/power_grid_model/validation/rules.py | 86 +++++++++++-------- src/power_grid_model/validation/utils.py | 16 ++-- src/power_grid_model/validation/validation.py | 24 +++--- 5 files changed, 99 insertions(+), 76 deletions(-) diff --git a/src/power_grid_model/_utils.py b/src/power_grid_model/_utils.py index b4c120069..6cbddacdf 100644 --- a/src/power_grid_model/_utils.py +++ b/src/power_grid_model/_utils.py @@ -24,6 +24,7 @@ SinglePythonDataset, SparseBatchArray, ) +from power_grid_model.dataset_definitions import PowerGridComponent def is_nan(data) -> bool: @@ -95,7 +96,7 @@ def get_and_verify_batch_sizes(batch_data: BatchDataset) -> int: """ n_batch_size = 0 - checked_components: List[str] = [] + checked_components: List[PowerGridComponent] = [] for component, data in batch_data.items(): n_component_batch_size = get_batch_size(data) if checked_components and n_component_batch_size != n_batch_size: @@ -143,7 +144,7 @@ def get_batch_size(batch_data: BatchArray) -> int: return n_batches -def split_numpy_array_in_batches(data: np.ndarray, component: str) -> List[np.ndarray]: +def split_numpy_array_in_batches(data: np.ndarray, component: PowerGridComponent) -> List[np.ndarray]: """ Split a single dense numpy array into one or more batches @@ -170,7 +171,7 @@ def split_numpy_array_in_batches(data: np.ndarray, component: str) -> List[np.nd ) -def split_sparse_batches_in_batches(batch_data: SparseBatchArray, component: str) -> List[np.ndarray]: +def split_sparse_batches_in_batches(batch_data: SparseBatchArray, component: PowerGridComponent) -> List[np.ndarray]: """ Split a single numpy array representing, a compressed sparse structure, into one or more batches diff --git a/src/power_grid_model/validation/errors.py b/src/power_grid_model/validation/errors.py index 64c4a8d2e..97a1691a1 100644 --- a/src/power_grid_model/validation/errors.py +++ b/src/power_grid_model/validation/errors.py @@ -10,6 +10,8 @@ from enum import Enum from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Type, Union +from power_grid_model.dataset_definitions import PowerGridComponent + class ValidationError(ABC): """ @@ -30,18 +32,18 @@ class ValidationError(ABC): """ - component: Optional[Union[str, List[str]]] = None + component: Optional[Union[PowerGridComponent, List[PowerGridComponent]]] = None """ The component, or components, to which the error applies. 
""" - field: Optional[Union[str, List[str], List[Tuple[str, str]]]] = None + field: Optional[Union[str, List[str], List[Tuple[PowerGridComponent, str]]]] = None """ The field, or fields, to which the error applies. A field can also be a tuple (component, field) when multiple components are being addressed. """ - ids: Optional[Union[List[int], List[Tuple[str, int]]]] = None + ids: Optional[Union[List[int], List[Tuple[PowerGridComponent, int]]]] = None """ The object identifiers to which the error applies. A field object identifier can also be a tuple (component, id) when multiple components are being addressed. @@ -113,11 +115,11 @@ class SingleFieldValidationError(ValidationError): """ _message = "Field {field} is not valid for {n} {objects}." - component: str + component: PowerGridComponent field: str ids: List[int] - def __init__(self, component: str, field: str, ids: Iterable[int]): + def __init__(self, component: PowerGridComponent, field: str, ids: Iterable[int]): """ Args: component: Component name @@ -135,11 +137,11 @@ class MultiFieldValidationError(ValidationError): """ _message = "Combination of fields {field} is not valid for {n} {objects}." - component: str + component: PowerGridComponent field: List[str] ids: List[int] - def __init__(self, component: str, fields: List[str], ids: List[int]): + def __init__(self, component: PowerGridComponent, fields: List[str], ids: List[int]): """ Args: component: Component name @@ -165,12 +167,12 @@ class MultiComponentValidationError(ValidationError): E.g. the two fields `id` fields of the `node` and `line` component: [('node', 'id'), ('line', 'id')]. """ - component: List[str] - field: List[Tuple[str, str]] - ids: List[Tuple[str, int]] + component: List[PowerGridComponent] + field: List[Tuple[PowerGridComponent, str]] + ids: List[Tuple[PowerGridComponent, int]] _message = "Fields {field} are not valid for {n} {objects}." - def __init__(self, fields: List[Tuple[str, str]], ids: List[Tuple[str, int]]): + def __init__(self, fields: List[Tuple[PowerGridComponent, str]], ids: List[Tuple[PowerGridComponent, int]]): """ Args: fields: List of field names, formatted as tuples (component, field) @@ -205,7 +207,7 @@ class NotIdenticalError(SingleFieldValidationError): unique: Set[Any] num_unique: int - def __init__(self, component: str, field: str, ids: Iterable[int], values: List[Any]): + def __init__(self, component: PowerGridComponent, field: str, ids: Iterable[int], values: List[Any]): super().__init__(component, field, ids) self.values = values self.unique = set(self.values) @@ -239,7 +241,9 @@ class InvalidEnumValueError(SingleFieldValidationError): _message = "Field {field} contains invalid {enum} values for {n} {objects}." enum: Union[Type[Enum], List[Type[Enum]]] - def __init__(self, component: str, field: str, ids: List[int], enum: Union[Type[Enum], List[Type[Enum]]]): + def __init__( + self, component: PowerGridComponent, field: str, ids: List[int], enum: Union[Type[Enum], List[Type[Enum]]] + ): super().__init__(component, field, ids) self.enum = enum @@ -292,7 +296,7 @@ class IdNotInDatasetError(SingleFieldValidationError): _message = "ID does not exist in {ref_dataset} for {n} {objects}." 
ref_dataset: str - def __init__(self, component: str, ids: List[int], ref_dataset: str): + def __init__(self, component: PowerGridComponent, ids: List[int], ref_dataset: str): super().__init__(component=component, field="id", ids=ids) self.ref_dataset = ref_dataset @@ -317,14 +321,14 @@ class InvalidIdError(SingleFieldValidationError): """ _message = "Field {field} does not contain a valid {ref_components} id for {n} {objects}. {filters}" - ref_components: List[str] + ref_components: List[PowerGridComponent] def __init__( self, - component: str, + component: PowerGridComponent, field: str, ids: List[int], - ref_components: Union[str, List[str]], + ref_components: Union[PowerGridComponent, List[PowerGridComponent]], filters: Optional[Dict[str, Any]] = None, ): # pylint: disable=too-many-arguments @@ -372,7 +376,7 @@ class ComparisonError(SingleFieldValidationError): RefType = Union[int, float, str, Tuple[Union[int, float, str], ...]] - def __init__(self, component: str, field: str, ids: List[int], ref_value: "ComparisonError.RefType"): + def __init__(self, component: PowerGridComponent, field: str, ids: List[int], ref_value: "ComparisonError.RefType"): super().__init__(component, field, ids) self.ref_value = ref_value @@ -480,7 +484,7 @@ class InvalidAssociatedEnumValueError(MultiFieldValidationError): def __init__( self, - component: str, + component: PowerGridComponent, fields: List[str], ids: List[int], enum: Union[Type[Enum], List[Type[Enum]]], diff --git a/src/power_grid_model/validation/rules.py b/src/power_grid_model/validation/rules.py index c15507a8c..d8ab0b3eb 100644 --- a/src/power_grid_model/validation/rules.py +++ b/src/power_grid_model/validation/rules.py @@ -23,7 +23,7 @@ data: SingleDataset The entire input/update data set - component: str + component: PowerGridComponent The name of the component, which should be an existing key in the data field: str @@ -40,6 +40,7 @@ import numpy as np from power_grid_model.data_types import SingleDataset +from power_grid_model.dataset_definitions import PowerGridComponent from power_grid_model.enum import FaultPhase, FaultType, WindingType from power_grid_model.validation.errors import ( ComparisonError, @@ -80,14 +81,14 @@ CompError = TypeVar("CompError", bound=ComparisonError) -def all_greater_than_zero(data: SingleDataset, component: str, field: str) -> List[NotGreaterThanError]: +def all_greater_than_zero(data: SingleDataset, component: PowerGridComponent, field: str) -> List[NotGreaterThanError]: """ Check that for all records of a particular type of component, the values in the 'field' column are greater than zero. Returns an empty list on success, or a list containing a single error object on failure. 
Args: data (SingleDataset): The input/update data set for all components - component (str): The component of interest + component (PowerGridComponent): The component of interest field (str): The field of interest Returns: @@ -99,7 +100,7 @@ def all_greater_than_zero(data: SingleDataset, component: str, field: str) -> Li def all_greater_than_or_equal_to_zero( data: SingleDataset, - component: str, + component: PowerGridComponent, field: str, default_value: Optional[Union[np.ndarray, int, float]] = None, ) -> List[NotGreaterOrEqualError]: @@ -109,7 +110,7 @@ def all_greater_than_or_equal_to_zero( Args: data (SingleDataset): The input/update data set for all components - component (str) The component of interest + component (PowerGridComponent) The component of interest field (str): The field of interest default_value (Optional[Union[np.ndarray, int, float]], optional): Some values are not required, but will receive a default value in the C++ core. To do a proper input validation, these default values should be @@ -124,7 +125,7 @@ def all_greater_than_or_equal_to_zero( def all_greater_than( - data: SingleDataset, component: str, field: str, ref_value: Union[int, float, str] + data: SingleDataset, component: PowerGridComponent, field: str, ref_value: Union[int, float, str] ) -> List[NotGreaterThanError]: """ Check that for all records of a particular type of component, the values in the 'field' column are greater than @@ -151,7 +152,7 @@ def not_greater(val: np.ndarray, *ref: np.ndarray): def all_greater_or_equal( data: SingleDataset, - component: str, + component: PowerGridComponent, field: str, ref_value: Union[int, float, str], default_value: Optional[Union[np.ndarray, int, float]] = None, @@ -187,7 +188,7 @@ def not_greater_or_equal(val: np.ndarray, *ref: np.ndarray): def all_less_than( - data: SingleDataset, component: str, field: str, ref_value: Union[int, float, str] + data: SingleDataset, component: PowerGridComponent, field: str, ref_value: Union[int, float, str] ) -> List[NotLessThanError]: """ Check that for all records of a particular type of component, the values in the 'field' column are less than the @@ -213,7 +214,7 @@ def not_less(val: np.ndarray, *ref: np.ndarray): def all_less_or_equal( - data: SingleDataset, component: str, field: str, ref_value: Union[int, float, str] + data: SingleDataset, component: PowerGridComponent, field: str, ref_value: Union[int, float, str] ) -> List[NotLessOrEqualError]: """ Check that for all records of a particular type of component, the values in the 'field' column are less than, @@ -242,7 +243,7 @@ def not_less_or_equal(val: np.ndarray, *ref: np.ndarray): def all_between( # pylint: disable=too-many-arguments data: SingleDataset, - component: str, + component: PowerGridComponent, field: str, ref_value_1: Union[int, float, str], ref_value_2: Union[int, float, str], @@ -282,7 +283,7 @@ def outside(val: np.ndarray, *ref: np.ndarray) -> np.ndarray: def all_between_or_at( # pylint: disable=too-many-arguments data: SingleDataset, - component: str, + component: PowerGridComponent, field: str, ref_value_1: Union[int, float, str], ref_value_2: Union[int, float, str], @@ -332,7 +333,7 @@ def outside(val: np.ndarray, *ref: np.ndarray) -> np.ndarray: def none_match_comparison( data: SingleDataset, - component: str, + component: PowerGridComponent, field: str, compare_fn: Callable, ref_value: ComparisonError.RefType, @@ -383,7 +384,7 @@ def none_match_comparison( return [] -def all_identical(data: SingleDataset, component: str, field: str) -> 
List[NotIdenticalError]: +def all_identical(data: SingleDataset, component: PowerGridComponent, field: str) -> List[NotIdenticalError]: """ Check that for all records of a particular type of component, the values in the 'field' column are identical. @@ -407,7 +408,7 @@ def all_identical(data: SingleDataset, component: str, field: str) -> List[NotId def all_enabled_identical( - data: SingleDataset, component: str, field: str, status_field: str + data: SingleDataset, component: PowerGridComponent, field: str, status_field: str ) -> List[NotIdenticalError]: """ Check that for all records of a particular type of component, the values in the 'field' column are identical. @@ -435,7 +436,7 @@ def all_enabled_identical( ) -def all_unique(data: SingleDataset, component: str, field: str) -> List[NotUniqueError]: +def all_unique(data: SingleDataset, component: PowerGridComponent, field: str) -> List[NotUniqueError]: """ Check that for all records of a particular type of component, the values in the 'field' column are unique within the 'field' column of that component. @@ -459,7 +460,7 @@ def all_unique(data: SingleDataset, component: str, field: str) -> List[NotUniqu def all_cross_unique( - data: SingleDataset, fields: List[Tuple[str, str]], cross_only=True + data: SingleDataset, fields: List[Tuple[PowerGridComponent, str]], cross_only=True ) -> List[MultiComponentNotUniqueError]: """ Check that for all records of a particular type of component, the values in the 'field' column are unique within @@ -476,7 +477,7 @@ def all_cross_unique( A list containing zero or one MultiComponentNotUniqueError, listing all fields and ids where the value was not unique between the fields. """ - all_values: Dict[int, List[Tuple[Tuple[str, str], int]]] = {} + all_values: Dict[int, List[Tuple[Tuple[PowerGridComponent, str], int]]] = {} duplicate_ids = set() for component, field in fields: for obj_id, value in zip(data[component]["id"], data[component][field]): @@ -495,7 +496,7 @@ def all_cross_unique( def all_valid_enum_values( - data: SingleDataset, component: str, field: str, enum: Union[Type[Enum], List[Type[Enum]]] + data: SingleDataset, component: PowerGridComponent, field: str, enum: Union[Type[Enum], List[Type[Enum]]] ) -> List[InvalidEnumValueError]: """ Check that for all records of a particular type of component, the values in the 'field' column are valid values for @@ -503,7 +504,7 @@ def all_valid_enum_values( Args: data (SingleDataset): The input/update data set for all components - component (str): The component of interest + component (PowerGridComponent): The component of interest field (str): The field of interest enum (Type[Enum] | List[Type[Enum]]): The enum type to validate against, or a list of such enum types @@ -526,20 +527,20 @@ def all_valid_enum_values( def all_valid_associated_enum_values( # pylint: disable=too-many-arguments data: SingleDataset, - component: str, + component: PowerGridComponent, field: str, ref_object_id_field: str, - ref_components: List[str], + ref_components: List[PowerGridComponent], enum: Union[Type[Enum], List[Type[Enum]]], **filters: Any, ) -> List[InvalidAssociatedEnumValueError]: """ Args: data (SingleDataset): The input/update data set for all components - component (str): The component of interest + component (PowerGridComponent): The component of interest field (str): The field of interest ref_object_id_field (str): The field that contains the referenced component ids - ref_components (List[str]): The component or components in which we want to look for ids + 
ref_components (List[PowerGridComponent]): The component or components in which we want to look for ids enum (Type[Enum] | List[Type[Enum]]): The enum type to validate against, or a list of such enum types **filters: One or more filters on the dataset. E.g. regulated_object="transformer". @@ -567,7 +568,11 @@ def all_valid_associated_enum_values( # pylint: disable=too-many-arguments def all_valid_ids( - data: SingleDataset, component: str, field: str, ref_components: Union[str, List[str]], **filters: Any + data: SingleDataset, + component: PowerGridComponent, + field: str, + ref_components: Union[PowerGridComponent, List[PowerGridComponent]], + **filters: Any, ) -> List[InvalidIdError]: """ For a column which should contain object identifiers (ids), check if the id exists in the data, for a specific set @@ -595,7 +600,7 @@ def all_valid_ids( return [] -def all_boolean(data: SingleDataset, component: str, field: str) -> List[NotBooleanError]: +def all_boolean(data: SingleDataset, component: PowerGridComponent, field: str) -> List[NotBooleanError]: """ Check that for all records of a particular type of component, the values in the 'field' column are valid boolean values, i.e. 0 or 1. Returns an empty list on success, or a list containing a single error object on failure. @@ -617,7 +622,7 @@ def all_boolean(data: SingleDataset, component: str, field: str) -> List[NotBool def all_not_two_values_zero( - data: SingleDataset, component: str, field_1: str, field_2: str + data: SingleDataset, component: PowerGridComponent, field_1: str, field_2: str ) -> List[TwoValuesZeroError]: """ Check that for all records of a particular type of component, the values in the 'field_1' and 'field_2' column are @@ -641,7 +646,9 @@ def all_not_two_values_zero( return [] -def all_not_two_values_equal(data: SingleDataset, component: str, field_1: str, field_2: str) -> List[SameValueError]: +def all_not_two_values_equal( + data: SingleDataset, component: PowerGridComponent, field_1: str, field_2: str +) -> List[SameValueError]: """ Check that for all records of a particular type of component, the values in the 'field_1' and 'field_2' column are not both the same value. E.g. from_node and to_node of a line. Returns an empty list on success, or a list @@ -666,7 +673,7 @@ def all_not_two_values_equal(data: SingleDataset, component: str, field_1: str, def all_ids_exist_in_data_set( - data: SingleDataset, ref_data: SingleDataset, component: str, ref_name: str + data: SingleDataset, ref_data: SingleDataset, component: PowerGridComponent, ref_name: str ) -> List[IdNotInDatasetError]: """ Check that for all records of a particular type of component, the ids exist in the reference data set. @@ -687,7 +694,9 @@ def all_ids_exist_in_data_set( return [] -def all_finite(data: SingleDataset, exceptions: Optional[Dict[str, List[str]]] = None) -> List[InfinityError]: +def all_finite( + data: SingleDataset, exceptions: Optional[Dict[PowerGridComponent, List[str]]] = None +) -> List[InfinityError]: """ Check that for all records in all component, the values in all columns are finite value, i.e. float values other than inf, or -inf. Nan values are ignored, as in all other comparison functions. 
You can use non_missing() to @@ -720,7 +729,10 @@ def all_finite(data: SingleDataset, exceptions: Optional[Dict[str, List[str]]] = def none_missing( - data: SingleDataset, component: str, fields: Union[List[Union[str, List[str]]], str, List[str]], index: int = 0 + data: SingleDataset, + component: PowerGridComponent, + fields: Union[List[Union[str, List[str]]], str, List[str]], + index: int = 0, ) -> List[MissingValueError]: """ Check that for all records of a particular type of component, the values in the 'fields' columns are not NaN. @@ -755,7 +767,7 @@ def none_missing( return errors -def valid_p_q_sigma(data: SingleDataset, component: str) -> List[MultiFieldValidationError]: +def valid_p_q_sigma(data: SingleDataset, component: PowerGridComponent) -> List[MultiFieldValidationError]: """ Check validity of the pair `(p_sigma, q_sigma)` for 'sym_power_sensor' and 'asym_power_sensor'. @@ -788,7 +800,7 @@ def valid_p_q_sigma(data: SingleDataset, component: str) -> List[MultiFieldValid def all_valid_clocks( - data: SingleDataset, component: str, clock_field: str, winding_from_field: str, winding_to_field: str + data: SingleDataset, component: PowerGridComponent, clock_field: str, winding_from_field: str, winding_to_field: str ) -> List[TransformerClockError]: """ Custom validation rule: Odd clock number is only allowed for Dy(n) or Y(N)d configuration. @@ -826,7 +838,7 @@ def all_valid_clocks( def all_valid_fault_phases( - data: SingleDataset, component: str, fault_type_field: str, fault_phase_field: str + data: SingleDataset, component: PowerGridComponent, fault_type_field: str, fault_phase_field: str ) -> List[FaultPhaseError]: """ Custom validation rule: Only a subset of fault_phases is supported for each fault type. @@ -881,19 +893,19 @@ def _fault_phase_supported(fault_type: FaultType, fault_phase: FaultPhase): def all_supported_tap_control_side( # pylint: disable=too-many-arguments data: SingleDataset, - component: str, + component: PowerGridComponent, control_side_field: str, regulated_object_field: str, - tap_side_fields: List[Tuple[str, str]], + tap_side_fields: List[Tuple[PowerGridComponent, str]], **filters: Any, ) -> List[UnsupportedTransformerRegulationError]: """ Args: data (SingleDataset): The input/update data set for all components - component (str): The component of interest + component (PowerGridComponent): The component of interest control_side_field (str): The field of interest regulated_object_field (str): The field that contains the regulated component ids - tap_side_fields (List[Tuple[str, str]]): The fields of interest per regulated component, + tap_side_fields (List[Tuple[PowerGridComponent, str]]): The fields of interest per regulated component, formatted as [(component_1, field_1), (component_2, field_2)] **filters: One or more filters on the dataset. E.g. regulated_object="transformer". 
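All rules above share the (data, component, field) calling convention, so they can also be invoked directly. A minimal sketch on a hypothetical three-node dataset (in normal use, validate_input_data drives these rules for you):

from power_grid_model import initialize_array
from power_grid_model.validation.rules import all_greater_than_zero

node = initialize_array("input", "node", 3)
node["id"] = [1, 2, 3]
node["u_rated"] = [10.5e3, 0.0, 10.5e3]  # the 0.0 at id 2 violates the rule

errors = all_greater_than_zero({"node": node}, "node", "u_rated")
print(errors)  # expect one NotGreaterThanError for component 'node', field 'u_rated', id 2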
diff --git a/src/power_grid_model/validation/utils.py b/src/power_grid_model/validation/utils.py index dc52421c7..326a9e48c 100644 --- a/src/power_grid_model/validation/utils.py +++ b/src/power_grid_model/validation/utils.py @@ -12,7 +12,7 @@ from power_grid_model import power_grid_meta_data from power_grid_model.data_types import SingleDataset -from power_grid_model.dataset_definitions import PowerGridDataType +from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType from power_grid_model.validation.errors import ValidationError @@ -97,7 +97,7 @@ def update_input_data(input_data: SingleDataset, update_data: SingleDataset): return merged_data -def update_component_data(component: str, input_data: np.ndarray, update_data: np.ndarray) -> None: +def update_component_data(component: PowerGridComponent, input_data: np.ndarray, update_data: np.ndarray) -> None: """ Update the data in a numpy array, with another numpy array, indexed on the "id" field and only non-NaN values are overwritten. @@ -162,7 +162,7 @@ def errors_to_string( return msg -def nan_type(component: str, field: str, data_type: PowerGridDataType = "input"): +def nan_type(component: PowerGridComponent, field: str, data_type: PowerGridDataType = "input"): """ Helper function to retrieve the nan value for a certain field as defined in the power_grid_meta_data. """ @@ -205,7 +205,9 @@ def get_indexer(source: np.ndarray, target: np.ndarray, default_value: Optional[ return np.where(source[clipped_indices] == target, permutation_sort[clipped_indices], default_value) -def set_default_value(data: SingleDataset, component: str, field: str, default_value: Union[int, float, np.ndarray]): +def set_default_value( + data: SingleDataset, component: PowerGridComponent, field: str, default_value: Union[int, float, np.ndarray] +): """ This function sets the default value in the data that is to be validated, so the default values are included in the validation. @@ -231,7 +233,9 @@ def set_default_value(data: SingleDataset, component: str, field: str, default_v data[component][field][mask] = default_value -def get_valid_ids(data: SingleDataset, ref_components: Union[str, List[str]]) -> List[int]: +def get_valid_ids( + data: SingleDataset, ref_components: Union[PowerGridComponent, List[PowerGridComponent]] +) -> List[int]: """ This function returns the valid IDs specified by all ref_components @@ -261,7 +265,7 @@ def get_valid_ids(data: SingleDataset, ref_components: Union[str, List[str]]) -> return list(valid_ids) -def get_mask(data: SingleDataset, component: str, field: str, **filters: Any) -> np.ndarray: +def get_mask(data: SingleDataset, component: PowerGridComponent, field: str, **filters: Any) -> np.ndarray: """ Get a mask based on the specified filters. E.g. measured_terminal_type=MeasuredTerminalType.source. 
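The helpers in this file are keyed on power_grid_meta_data; nan_type, for instance, looks up the sentinel that marks a missing value for a given component and field. A short sketch, assuming the standard metadata conventions (NaN for floating-point fields, -128 for int8 fields):

import numpy as np

from power_grid_model.validation.utils import nan_type

print(np.isnan(nan_type("node", "u_rated")))     # True: float fields use NaN
print(nan_type("line", "from_status"))           # -128, assuming the int8 sentinel
print(nan_type("line", "p_from", "sym_output"))  # NaN again, from the sym_output metadata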
diff --git a/src/power_grid_model/validation/validation.py b/src/power_grid_model/validation/validation.py index 2f4e9b18a..bee85ed86 100644 --- a/src/power_grid_model/validation/validation.py +++ b/src/power_grid_model/validation/validation.py @@ -18,7 +18,7 @@ from power_grid_model import power_grid_meta_data from power_grid_model._utils import convert_batch_dataset_to_batch_list from power_grid_model.data_types import BatchDataset, Dataset, SingleDataset -from power_grid_model.dataset_definitions import PowerGridDataType +from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType from power_grid_model.enum import ( Branch3Side, BranchSide, @@ -205,7 +205,9 @@ def validate_unique_ids_across_components(data: SingleDataset) -> List[MultiComp return all_cross_unique(data, [(component, "id") for component in data]) -def validate_ids_exist(update_data: Dict[str, np.ndarray], input_data: SingleDataset) -> List[IdNotInDatasetError]: +def validate_ids_exist( + update_data: Dict[PowerGridComponent, np.ndarray], input_data: SingleDataset +) -> List[IdNotInDatasetError]: """ Checks if all ids of the components in the update data exist in the input data. This needs to be true, because you can only update existing components. @@ -226,7 +228,7 @@ def validate_ids_exist(update_data: Dict[str, np.ndarray], input_data: SingleDat def _process_power_sigma_and_p_q_sigma( - data: SingleDataset, sensor: str, required_list: Dict[str, List[Union[str, List[str]]]] + data: SingleDataset, sensor: PowerGridComponent, required_list: Dict[str, List[Union[str, List[str]]]] ) -> None: """ Helper function to process the required list when both `p_sigma` and `q_sigma` exist @@ -493,7 +495,7 @@ def validate_values(data: SingleDataset, calculation_type: Optional[CalculationT # pylint: disable=missing-function-docstring -def validate_base(data: SingleDataset, component: str) -> List[ValidationError]: +def validate_base(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: errors: List[ValidationError] = list(all_unique(data, component, "id")) return errors @@ -504,7 +506,7 @@ def validate_node(data: SingleDataset) -> List[ValidationError]: return errors -def validate_branch(data: SingleDataset, component: str) -> List[ValidationError]: +def validate_branch(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: errors = validate_base(data, component) errors += all_valid_ids(data, component, "from_node", "node") errors += all_valid_ids(data, component, "to_node", "node") @@ -550,7 +552,7 @@ def validate_transformer(data: SingleDataset) -> List[ValidationError]: return errors -def validate_branch3(data: SingleDataset, component: str) -> List[ValidationError]: +def validate_branch3(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: errors = validate_base(data, component) errors += all_valid_ids(data, component, "node_1", "node") errors += all_valid_ids(data, component, "node_2", "node") @@ -682,7 +684,7 @@ def validate_three_winding_transformer(data: SingleDataset) -> List[ValidationEr return errors -def validate_appliance(data: SingleDataset, component: str) -> List[ValidationError]: +def validate_appliance(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: errors = validate_base(data, component) errors += all_boolean(data, component, "status") errors += all_valid_ids(data, component, "node", "node") @@ -698,7 +700,7 @@ def validate_source(data: SingleDataset) -> List[ValidationError]: return errors 
-def validate_generic_load_gen(data: SingleDataset, component: str) -> List[ValidationError]: +def validate_generic_load_gen(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: errors = validate_appliance(data, component) errors += all_valid_enum_values(data, component, "type", LoadGenType) return errors @@ -709,7 +711,7 @@ def validate_shunt(data: SingleDataset) -> List[ValidationError]: return errors -def validate_generic_voltage_sensor(data: SingleDataset, component: str) -> List[ValidationError]: +def validate_generic_voltage_sensor(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: errors = validate_base(data, component) errors += all_greater_than_zero(data, component, "u_sigma") errors += all_greater_than_zero(data, component, "u_measured") @@ -717,7 +719,7 @@ def validate_generic_voltage_sensor(data: SingleDataset, component: str) -> List return errors -def validate_generic_power_sensor(data: SingleDataset, component: str) -> List[ValidationError]: +def validate_generic_power_sensor(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: errors = validate_base(data, component) errors += all_greater_than_zero(data, component, "power_sigma") errors += all_valid_enum_values(data, component, "measured_terminal_type", MeasuredTerminalType) @@ -827,7 +829,7 @@ def validate_fault(data: SingleDataset) -> List[ValidationError]: return errors -def validate_regulator(data: SingleDataset, component: str) -> List[ValidationError]: +def validate_regulator(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: errors = validate_base(data, component) errors += all_valid_ids( data, From 5f367a8c484917e981a0608fab075f3633f7ae67 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Thu, 20 Jun 2024 12:28:31 +0200 Subject: [PATCH 18/47] Fixed issues with new data types Signed-off-by: Santiago Figueroa --- src/power_grid_model/_utils.py | 3 ++- src/power_grid_model/validation/errors.py | 7 ++++--- src/power_grid_model/validation/validation.py | 6 ++++-- tests/unit/test_serialization.py | 7 +++++-- 4 files changed, 15 insertions(+), 8 deletions(-) diff --git a/src/power_grid_model/_utils.py b/src/power_grid_model/_utils.py index 6cbddacdf..56280155c 100644 --- a/src/power_grid_model/_utils.py +++ b/src/power_grid_model/_utils.py @@ -103,7 +103,8 @@ def get_and_verify_batch_sizes(batch_data: BatchDataset) -> int: if len(checked_components) == 1: checked_components_str = f"'{checked_components.pop()}'" else: - checked_components_str = "/".join(sorted(checked_components)) + str_checked_components = [str(component) for component in checked_components] + checked_components_str = "/".join(sorted(str_checked_components)) raise ValueError( f"Inconsistent number of batches in batch data. 
" f"Component '{component}' contains {n_component_batch_size} batches, " diff --git a/src/power_grid_model/validation/errors.py b/src/power_grid_model/validation/errors.py index 97a1691a1..fb08989c1 100644 --- a/src/power_grid_model/validation/errors.py +++ b/src/power_grid_model/validation/errors.py @@ -178,7 +178,7 @@ def __init__(self, fields: List[Tuple[PowerGridComponent, str]], ids: List[Tuple fields: List of field names, formatted as tuples (component, field) ids: List of component IDs (not row indices), formatted as tuples (component, id) """ - self.component = sorted(set(component for component, _ in fields)) + self.component = sorted(set(component for component, _ in fields), key=str) self.field = sorted(fields) self.ids = sorted(ids) @@ -189,7 +189,8 @@ def __init__(self, fields: List[Tuple[PowerGridComponent, str]], ids: List[Tuple @property def component_str(self) -> str: - return "/".join(self.component) + str_components = [str(component) for component in self.component] + return "/".join(str_components) @property def field_str(self) -> str: @@ -333,7 +334,7 @@ def __init__( ): # pylint: disable=too-many-arguments super().__init__(component=component, field=field, ids=ids) - self.ref_components = [ref_components] if isinstance(ref_components, str) else ref_components + self.ref_components = [ref_components] if isinstance(ref_components, str) else ref_components # type: ignore self.filters = filters if filters else None @property diff --git a/src/power_grid_model/validation/validation.py b/src/power_grid_model/validation/validation.py index bee85ed86..32ebd5655 100644 --- a/src/power_grid_model/validation/validation.py +++ b/src/power_grid_model/validation/validation.py @@ -228,7 +228,9 @@ def validate_ids_exist( def _process_power_sigma_and_p_q_sigma( - data: SingleDataset, sensor: PowerGridComponent, required_list: Dict[str, List[Union[str, List[str]]]] + data: SingleDataset, + sensor: PowerGridComponent, + required_list: Dict[Union[PowerGridComponent, str], List[Union[str, List[str]]]], ) -> None: """ Helper function to process the required list when both `p_sigma` and `q_sigma` exist @@ -279,7 +281,7 @@ def validate_required_values( An empty list if all required data is available, or a list of MissingValueErrors. 
""" # Base - required: Dict[str, List[Union[str, List[str]]]] = {"base": ["id"]} + required: Dict[Union[PowerGridComponent, str], List[Union[str, List[str]]]] = {"base": ["id"]} # Nodes required["node"] = required["base"] + ["u_rated"] diff --git a/tests/unit/test_serialization.py b/tests/unit/test_serialization.py index 76136a693..638d05927 100644 --- a/tests/unit/test_serialization.py +++ b/tests/unit/test_serialization.py @@ -10,6 +10,7 @@ import pytest from power_grid_model.core.power_grid_dataset import get_dataset_type +from power_grid_model.dataset_definitions import PowerGridComponent from power_grid_model.utils import json_deserialize, json_serialize, msgpack_deserialize, msgpack_serialize @@ -266,7 +267,9 @@ def assert_almost_equal(value: np.ndarray, reference: Any): def assert_scenario_correct( - deserialized_dataset: Mapping[str, np.ndarray], serialized_dataset: Mapping[str, Any], sparse_components: List[str] + deserialized_dataset: Mapping[PowerGridComponent, np.ndarray], + serialized_dataset: Mapping[str, Any], + sparse_components: List[PowerGridComponent], ): for key in serialized_dataset["data"]: if key not in deserialized_dataset: @@ -291,7 +294,7 @@ def assert_scenario_correct( def assert_serialization_correct( - deserialized_dataset: Mapping[str, Union[np.ndarray, Mapping[str, np.ndarray]]], + deserialized_dataset: Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], serialized_dataset: Mapping[str, Any], ): """Assert the dataset correctly reprensents the input data.""" From ffcb3a26d712c9144dcef957357b32cd8eb2a2f4 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Thu, 20 Jun 2024 14:25:56 +0200 Subject: [PATCH 19/47] Fixed issues with new data types Signed-off-by: Santiago Figueroa --- scripts/quick_example.py | 11 ++++++++++- scripts/quick_example_batch.py | 12 ++++++++++-- src/power_grid_model/validation/utils.py | 2 +- tests/unit/validation/test_assertions.py | 16 ++++++++-------- 4 files changed, 29 insertions(+), 12 deletions(-) diff --git a/scripts/quick_example.py b/scripts/quick_example.py index 5686fb3ee..95a7a2781 100644 --- a/scripts/quick_example.py +++ b/scripts/quick_example.py @@ -2,9 +2,13 @@ # # SPDX-License-Identifier: MPL-2.0 +from typing import Dict + +import numpy as np import pandas as pd from power_grid_model import LoadGenType, PowerGridModel, initialize_array +from power_grid_model.dataset_definitions import PowerGridComponent # node node = initialize_array("input", "node", 2) @@ -41,7 +45,12 @@ source["u_ref"] = [1.0] # input_data -input_data = {"node": node, "line": line, "sym_load": sym_load, "source": source} +input_data: Dict[PowerGridComponent, np.ndarray] = { + "node": node, + "line": line, + "sym_load": sym_load, + "source": source, +} # call constructor model = PowerGridModel(input_data, system_frequency=50.0) diff --git a/scripts/quick_example_batch.py b/scripts/quick_example_batch.py index 1b6608d04..755858b67 100644 --- a/scripts/quick_example_batch.py +++ b/scripts/quick_example_batch.py @@ -2,10 +2,13 @@ # # SPDX-License-Identifier: MPL-2.0 +from typing import Dict + import numpy as np import pandas as pd from power_grid_model import LoadGenType, PowerGridModel, initialize_array +from power_grid_model.dataset_definitions import PowerGridComponent """ node_1 ---line_3--- node_2 ---line_6--- node_7 @@ -54,7 +57,12 @@ source["u_ref"] = [1.0] # input_data -input_data = {"node": node, "line": line, "asym_load": asym_load, "source": source} +input_data: Dict[PowerGridComponent, np.ndarray] = { + "node": node, + 
"line": line, + "asym_load": asym_load, + "source": source, +} # call constructor model = PowerGridModel(input_data, system_frequency=50.0) @@ -73,7 +81,7 @@ batch_load = initialize_array("update", "asym_load", (1000, 2)) batch_load["id"] = [[4, 8]] batch_load["p_specified"] = batch_p -batch_update = {"asym_load": batch_load} +batch_update: Dict[PowerGridComponent, np.ndarray] = {"asym_load": batch_load} result = model.calculate_power_flow(symmetric=False, update_data=batch_update) print(result["node"]["u"].shape) # 1000 (scenarios) *3 (nodes) *3 (phases) diff --git a/src/power_grid_model/validation/utils.py b/src/power_grid_model/validation/utils.py index 326a9e48c..8f1da55b9 100644 --- a/src/power_grid_model/validation/utils.py +++ b/src/power_grid_model/validation/utils.py @@ -248,7 +248,7 @@ def get_valid_ids( """ # For convenience, ref_component may be a string and we'll convert it to a 'list' containing that string as it's # single element. - if isinstance(ref_components, str): + if not isinstance(ref_components, list): ref_components = [ref_components] # Create a set of ids by chaining the ids of all ref_components diff --git a/tests/unit/validation/test_assertions.py b/tests/unit/validation/test_assertions.py index 57ed5cf6d..17d4d1592 100644 --- a/tests/unit/validation/test_assertions.py +++ b/tests/unit/validation/test_assertions.py @@ -28,10 +28,10 @@ def test_validation_exception(errors_to_string_mock: MagicMock): def test_assert_valid_input_data(validate_mock: MagicMock): validate_mock.return_value = None assert_valid_input_data( - input_data={"foo": np.array([1])}, calculation_type=CalculationType.state_estimation, symmetric=False + input_data={"foo": np.array([1])}, calculation_type=CalculationType.state_estimation, symmetric=False # type: ignore ) validate_mock.assert_called_once_with( - input_data={"foo": np.array([1])}, calculation_type=CalculationType.state_estimation, symmetric=False + input_data={"foo": np.array([1])}, calculation_type=CalculationType.state_estimation, symmetric=False # type: ignore ) validate_mock.return_value = [ValidationError()] @@ -42,15 +42,15 @@ def test_assert_valid_input_data(validate_mock: MagicMock): @patch("power_grid_model.validation.assertions.validate_batch_data") def test_assert_valid_batch_data(validate_mock: MagicMock): validate_mock.return_value = None - assert_valid_batch_data( - input_data={"foo": np.array([1])}, - update_data={"bar": np.array([2])}, + assert_valid_batch_data( # type: ignore + input_data={"foo": np.array([1])}, # type: ignore + update_data={"bar": np.array([2])}, # type: ignore calculation_type=CalculationType.state_estimation, symmetric=False, ) - validate_mock.assert_called_once_with( - input_data={"foo": np.array([1])}, - update_data={"bar": np.array([2])}, + validate_mock.assert_called_once_with( # type: ignore + input_data={"foo": np.array([1])}, # type: ignore + update_data={"bar": np.array([2])}, # type: ignore calculation_type=CalculationType.state_estimation, symmetric=False, ) From 7bcf96ba018402322714acaf5ebf579ccfa7d007 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Thu, 20 Jun 2024 14:36:25 +0200 Subject: [PATCH 20/47] Fixed issues with new data types Signed-off-by: Santiago Figueroa --- tests/unit/test_internal_utils.py | 2 +- tests/unit/test_power_grid_model.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_internal_utils.py b/tests/unit/test_internal_utils.py index 385802191..b003f12db 100644 --- a/tests/unit/test_internal_utils.py +++ 
b/tests/unit/test_internal_utils.py @@ -406,7 +406,7 @@ def test_convert_get_and_verify_batch_sizes_inconsistent_batch_sizes_more_than_t @patch("power_grid_model._utils.get_and_verify_batch_sizes") def test_convert_batch_dataset_to_batch_list_missing_key_sparse(_mock: MagicMock): - update_data: BatchDataset = {"foo": {"a": np.empty(3), "data": np.empty(3)}} + update_data: BatchDataset = {"foo": {"a": np.empty(3), "data": np.empty(3)}} # type: ignore with pytest.raises( KeyError, match="Missing 'indptr' in sparse batch data for 'foo' " diff --git a/tests/unit/test_power_grid_model.py b/tests/unit/test_power_grid_model.py index ebe93ca62..2fc7bb2cf 100644 --- a/tests/unit/test_power_grid_model.py +++ b/tests/unit/test_power_grid_model.py @@ -3,11 +3,13 @@ # SPDX-License-Identifier: MPL-2.0 from copy import copy +from typing import Dict import numpy as np import pytest from power_grid_model import PowerGridModel, initialize_array +from power_grid_model.dataset_definitions import PowerGridComponent from power_grid_model.errors import PowerGridBatchError, PowerGridError from power_grid_model.validation import assert_valid_input_data @@ -76,7 +78,7 @@ def test_simple_update(model: PowerGridModel, case_data): update_batch = case_data["update_batch"] source_indptr = update_batch["source"]["indptr"] source_update = update_batch["source"]["data"] - update_data = { + update_data: Dict[PowerGridComponent, np.ndarray] = { "source": source_update[source_indptr[0] : source_indptr[1]], "sym_load": update_batch["sym_load"][0, :], } From 21ca80127eff20c6782f873d190622edde040238 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Thu, 20 Jun 2024 14:45:24 +0200 Subject: [PATCH 21/47] Fixed issues with new data types Signed-off-by: Santiago Figueroa --- tests/unit/test_power_grid_model.py | 4 ++-- tests/unit/validation/test_validation_functions.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/unit/test_power_grid_model.py b/tests/unit/test_power_grid_model.py index 2fc7bb2cf..829a386b6 100644 --- a/tests/unit/test_power_grid_model.py +++ b/tests/unit/test_power_grid_model.py @@ -91,7 +91,7 @@ def test_simple_update(model: PowerGridModel, case_data): def test_update_error(model: PowerGridModel): load_update = initialize_array("update", "sym_load", 1) load_update["id"] = 5 - update_data = {"sym_load": load_update} + update_data: Dict[PowerGridComponent, np.ndarray] = {"sym_load": load_update} with pytest.raises(PowerGridError, match="The id cannot be found:"): model.update(update_data=update_data) @@ -155,7 +155,7 @@ def test_batch_calculation_error_continue(model: PowerGridModel, case_data): assert "The id cannot be found:" in error.error_messages[0] # assert value result for scenario 0 result = {"node": result["node"][error.succeeded_scenarios, :]} - expected_result = {"node": case_data["output_batch"]["node"][error.succeeded_scenarios, :]} + expected_result: Dict[PowerGridComponent, np.ndarray] = {"node": case_data["output_batch"]["node"][error.succeeded_scenarios, :]} compare_result(result, expected_result, rtol=0.0, atol=1e-8) # general error before the batch with pytest.raises(PowerGridError, match="The calculation method is invalid for this calculation!"): diff --git a/tests/unit/validation/test_validation_functions.py b/tests/unit/validation/test_validation_functions.py index 5cdd0f4d6..e3d598066 100644 --- a/tests/unit/validation/test_validation_functions.py +++ b/tests/unit/validation/test_validation_functions.py @@ -611,7 +611,7 @@ def 
test_validate_generic_power_sensor__all_terminal_types( all_valid_ids: MagicMock, measured_terminal_type: MeasuredTerminalType ): # Act - validate_generic_power_sensor(data={}, component="") + validate_generic_power_sensor(data={}, component="") # type: ignore # Assert all_valid_ids.assert_any_call( @@ -642,7 +642,7 @@ def test_validate_generic_power_sensor__terminal_types( all_valid_ids: MagicMock, ref_component: Union[str, List[str]], measured_terminal_type: MeasuredTerminalType ): # Act - validate_generic_power_sensor(data={}, component="") + validate_generic_power_sensor(data={}, component="") # type: ignore # Assert all_valid_ids.assert_any_call( From dfb5172c2938a3f55ccffd99275dd2c17ec4d36f Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Thu, 20 Jun 2024 14:48:17 +0200 Subject: [PATCH 22/47] Fixed issues with new data types Signed-off-by: Santiago Figueroa --- tests/unit/test_power_grid_model.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_power_grid_model.py b/tests/unit/test_power_grid_model.py index 829a386b6..3f0edc8be 100644 --- a/tests/unit/test_power_grid_model.py +++ b/tests/unit/test_power_grid_model.py @@ -83,7 +83,7 @@ def test_simple_update(model: PowerGridModel, case_data): "sym_load": update_batch["sym_load"][0, :], } model.update(update_data=update_data) - expected_result = {"node": case_data["output_batch"]["node"][0, :]} + expected_result: Dict[PowerGridComponent, np.ndarray] = {"node": case_data["output_batch"]["node"][0, :]} result = model.calculate_power_flow() compare_result(result, expected_result, rtol=0.0, atol=1e-8) @@ -155,7 +155,9 @@ def test_batch_calculation_error_continue(model: PowerGridModel, case_data): assert "The id cannot be found:" in error.error_messages[0] # assert value result for scenario 0 result = {"node": result["node"][error.succeeded_scenarios, :]} - expected_result: Dict[PowerGridComponent, np.ndarray] = {"node": case_data["output_batch"]["node"][error.succeeded_scenarios, :]} + expected_result: Dict[PowerGridComponent, np.ndarray] = { + "node": case_data["output_batch"]["node"][error.succeeded_scenarios, :] + } compare_result(result, expected_result, rtol=0.0, atol=1e-8) # general error before the batch with pytest.raises(PowerGridError, match="The calculation method is invalid for this calculation!"): From dd694e16d4f9e2b856b0caa4e8217a65abd0120d Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Thu, 20 Jun 2024 15:16:44 +0200 Subject: [PATCH 23/47] PowerGridComponent -> ComponentType. 
PowerGridDataType -> DataType Signed-off-by: Santiago Figueroa --- .../power_grid_model/metadata_enums.py.jinja | 24 +++--- docs/api_reference/python-api-reference.md | 4 +- scripts/quick_example.py | 4 +- scripts/quick_example_batch.py | 6 +- src/power_grid_model/__init__.py | 2 +- src/power_grid_model/_utils.py | 8 +- src/power_grid_model/core/buffer_handling.py | 4 +- src/power_grid_model/core/data_handling.py | 16 ++-- .../core/power_grid_dataset.py | 44 +++++----- src/power_grid_model/core/power_grid_meta.py | 8 +- src/power_grid_model/core/power_grid_model.py | 56 ++++++------- src/power_grid_model/core/serialization.py | 42 +++++----- src/power_grid_model/data_types.py | 8 +- src/power_grid_model/dataset_definitions.py | 24 +++--- src/power_grid_model/utils.py | 8 +- src/power_grid_model/validation/errors.py | 42 +++++----- src/power_grid_model/validation/rules.py | 80 +++++++++---------- src/power_grid_model/validation/utils.py | 14 ++-- src/power_grid_model/validation/validation.py | 28 +++---- tests/unit/test_dataset_definitions.py | 6 +- tests/unit/test_power_grid_model.py | 10 +-- tests/unit/test_serialization.py | 8 +- tests/unit/utils.py | 4 +- 23 files changed, 220 insertions(+), 230 deletions(-) diff --git a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja index 3a06ee927..a3aace202 100644 --- a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja @@ -23,7 +23,7 @@ sys.path.append(sys_path) # pylint: disable=invalid-name -class PowerGridDataTypes(Enum): +class DataTypes(Enum): """Dataset types.""" {% for dataset_type in dataset_types %} @@ -31,25 +31,25 @@ class PowerGridDataTypes(Enum): {% endfor %} -PowerGridDataTypesLiteral = Literal[ +DataTypesLiteral = Literal[ {% for dataset_type in dataset_types %} "{{ dataset_type }}", {% endfor %} ] -PowerGridDataType = Union[PowerGridDataTypes, PowerGridDataTypesLiteral] +DataType = Union[DataTypes, DataTypesLiteral] """ -A PowerGridDataType is the type of a :class:`BatchDataset`. +A DataType is the type of a :class:`BatchDataset`. - Examples: - - PowerGridDataType.input = "input" - - PowerGridDataType.update = "update" + - DataType.input = "input" + - DataType.update = "update" """ -class PowerGridComponents(Enum): +class ComponentTypes(Enum): """Grid component types.""" {% for component in components %} @@ -57,21 +57,21 @@ class PowerGridComponents(Enum): {% endfor %} -PowerGridComponentsLiteral = Literal[ +ComponentTypesLiteral = Literal[ {% for component in components %} "{{ component }}", {% endfor %} ] -PowerGridComponent = Union[PowerGridComponents, PowerGridComponentsLiteral] +ComponentType = Union[ComponentTypes, ComponentTypesLiteral] """ -A PowerGridComponent is the type of a grid component. +A ComponentType is the type of a grid component. - Examples: - - PowerGridComponent.node = "node" - - PowerGridComponent.line = "line" + - ComponentType.node = "node" + - ComponentType.line = "line" """ # pylint: enable=invalid-name diff --git a/docs/api_reference/python-api-reference.md b/docs/api_reference/python-api-reference.md index 94508224c..a9b1f5220 100644 --- a/docs/api_reference/python-api-reference.md +++ b/docs/api_reference/python-api-reference.md @@ -32,8 +32,8 @@ SPDX-License-Identifier: MPL-2.0 .. autoclass:: power_grid_model.data_types.BatchArray .. autoclass:: power_grid_model.data_types.DenseBatchArray .. 
autoclass:: power_grid_model.data_types.SparseBatchArray -.. autoclass:: power_grid_model.dataset_definitions.PowerGridDataType -.. autoclass:: power_grid_model.dataset_definitions.PowerGridDataType +.. autoclass:: power_grid_model.dataset_definitions.DataType +.. autoclass:: power_grid_model.dataset_definitions.DataType ``` ## error types diff --git a/scripts/quick_example.py b/scripts/quick_example.py index 95a7a2781..c2f9bbae1 100644 --- a/scripts/quick_example.py +++ b/scripts/quick_example.py @@ -8,7 +8,7 @@ import pandas as pd from power_grid_model import LoadGenType, PowerGridModel, initialize_array -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType # node node = initialize_array("input", "node", 2) @@ -45,7 +45,7 @@ source["u_ref"] = [1.0] # input_data -input_data: Dict[PowerGridComponent, np.ndarray] = { +input_data: Dict[ComponentType, np.ndarray] = { "node": node, "line": line, "sym_load": sym_load, diff --git a/scripts/quick_example_batch.py b/scripts/quick_example_batch.py index 755858b67..0d520f2e5 100644 --- a/scripts/quick_example_batch.py +++ b/scripts/quick_example_batch.py @@ -8,7 +8,7 @@ import pandas as pd from power_grid_model import LoadGenType, PowerGridModel, initialize_array -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType """ node_1 ---line_3--- node_2 ---line_6--- node_7 @@ -57,7 +57,7 @@ source["u_ref"] = [1.0] # input_data -input_data: Dict[PowerGridComponent, np.ndarray] = { +input_data: Dict[ComponentType, np.ndarray] = { "node": node, "line": line, "asym_load": asym_load, @@ -81,7 +81,7 @@ batch_load = initialize_array("update", "asym_load", (1000, 2)) batch_load["id"] = [[4, 8]] batch_load["p_specified"] = batch_p -batch_update: Dict[PowerGridComponent, np.ndarray] = {"asym_load": batch_load} +batch_update: Dict[ComponentType, np.ndarray] = {"asym_load": batch_load} result = model.calculate_power_flow(symmetric=False, update_data=batch_update) print(result["node"]["u"].shape) # 1000 (scenarios) *3 (nodes) *3 (phases) diff --git a/src/power_grid_model/__init__.py b/src/power_grid_model/__init__.py index 9c4100d23..5cb43f40a 100644 --- a/src/power_grid_model/__init__.py +++ b/src/power_grid_model/__init__.py @@ -6,7 +6,7 @@ from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data from power_grid_model.core.power_grid_model import PowerGridModel -from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType +from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.enum import ( Branch3Side, BranchSide, diff --git a/src/power_grid_model/_utils.py b/src/power_grid_model/_utils.py index 56280155c..d4ef5f99c 100644 --- a/src/power_grid_model/_utils.py +++ b/src/power_grid_model/_utils.py @@ -24,7 +24,7 @@ SinglePythonDataset, SparseBatchArray, ) -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType def is_nan(data) -> bool: @@ -96,7 +96,7 @@ def get_and_verify_batch_sizes(batch_data: BatchDataset) -> int: """ n_batch_size = 0 - checked_components: List[PowerGridComponent] = [] + checked_components: List[ComponentType] = [] for component, data in batch_data.items(): n_component_batch_size = get_batch_size(data) if checked_components and n_component_batch_size != n_batch_size: @@ -145,7 +145,7 @@ def get_batch_size(batch_data: 
BatchArray) -> int: return n_batches -def split_numpy_array_in_batches(data: np.ndarray, component: PowerGridComponent) -> List[np.ndarray]: +def split_numpy_array_in_batches(data: np.ndarray, component: ComponentType) -> List[np.ndarray]: """ Split a single dense numpy array into one or more batches @@ -172,7 +172,7 @@ def split_numpy_array_in_batches(data: np.ndarray, component: PowerGridComponent ) -def split_sparse_batches_in_batches(batch_data: SparseBatchArray, component: PowerGridComponent) -> List[np.ndarray]: +def split_sparse_batches_in_batches(batch_data: SparseBatchArray, component: ComponentType) -> List[np.ndarray]: """ Split a single numpy array representing, a compressed sparse structure, into one or more batches diff --git a/src/power_grid_model/core/buffer_handling.py b/src/power_grid_model/core/buffer_handling.py index 9fa4370c6..fc7b92029 100644 --- a/src/power_grid_model/core/buffer_handling.py +++ b/src/power_grid_model/core/buffer_handling.py @@ -16,7 +16,7 @@ from power_grid_model.core.index_integer import IdxC, IdxNp from power_grid_model.core.power_grid_core import IdxPtr, VoidPtr from power_grid_model.core.power_grid_meta import ComponentMetaData -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType @dataclass @@ -233,7 +233,7 @@ def get_buffer_view(data: Union[np.ndarray, Mapping[str, np.ndarray]], schema: C def create_buffer( properties: BufferProperties, schema: ComponentMetaData -) -> Union[np.ndarray, Dict[PowerGridComponent, np.ndarray]]: +) -> Union[np.ndarray, Dict[ComponentType, np.ndarray]]: """ Create a buffer with the provided properties and type. diff --git a/src/power_grid_model/core/data_handling.py b/src/power_grid_model/core/data_handling.py index 00bbd446d..b5c5522f4 100644 --- a/src/power_grid_model/core/data_handling.py +++ b/src/power_grid_model/core/data_handling.py @@ -14,7 +14,7 @@ from power_grid_model.core.power_grid_dataset import CConstDataset, CMutableDataset from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType from power_grid_model.enum import CalculationType @@ -53,7 +53,7 @@ def get_output_type(*, calculation_type: CalculationType, symmetric: bool) -> Ou raise NotImplementedError() -def prepare_input_view(input_data: Mapping[PowerGridComponent, np.ndarray]) -> CConstDataset: +def prepare_input_view(input_data: Mapping[ComponentType, np.ndarray]) -> CConstDataset: """ Create a view of the input data in a format compatible with the PGM core libary. @@ -68,7 +68,7 @@ def prepare_input_view(input_data: Mapping[PowerGridComponent, np.ndarray]) -> C def prepare_update_view( - update_data: Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]] + update_data: Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]] ) -> CConstDataset: """ Create a view of the update data, or an empty view if not provided, in a format compatible with the PGM core libary. 
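To make the two batch layouts these helpers split concrete, the sketch below builds one dense and one sparse batch update keyed by ComponentType. The component ids and values are invented for illustration; the indptr/data layout follows the sparse structure referenced in the error messages above.

from typing import Dict, Union

import numpy as np

from power_grid_model import initialize_array
from power_grid_model.dataset_definitions import ComponentType

# Dense batch: one row per scenario, one column per object.
dense_load = initialize_array("update", "sym_load", (3, 2))
dense_load["id"] = [[4, 7]] * 3
dense_load["p_specified"] = [[1.0e6, 2.0e6], [1.1e6, 2.1e6], [1.2e6, 2.2e6]]

# Sparse batch: a flat data array plus an indptr marking scenario boundaries;
# here scenario 0 updates one source, scenario 1 none, and scenario 2 two.
sparse_source = initialize_array("update", "source", 3)
sparse_source["id"] = [1, 1, 5]
sparse_source["u_ref"] = [1.0, 1.02, 1.05]

batch_update: Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]] = {
    "sym_load": dense_load,
    "source": {"indptr": np.array([0, 1, 1, 3]), "data": sparse_source},
}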
@@ -83,9 +83,7 @@ def prepare_update_view( return CConstDataset(update_data, dataset_type="update") -def prepare_output_view( - output_data: Mapping[PowerGridComponent, np.ndarray], output_type: OutputType -) -> CMutableDataset: +def prepare_output_view(output_data: Mapping[ComponentType, np.ndarray], output_type: OutputType) -> CMutableDataset: """ create a view of the output data in a format compatible with the PGM core libary. @@ -102,12 +100,12 @@ def prepare_output_view( def create_output_data( - output_component_types: Union[Set[PowerGridComponent], List[PowerGridComponent]], + output_component_types: Union[Set[ComponentType], List[ComponentType]], output_type: OutputType, - all_component_count: Dict[PowerGridComponent, int], + all_component_count: Dict[ComponentType, int], is_batch: bool, batch_size: int, -) -> Dict[PowerGridComponent, np.ndarray]: +) -> Dict[ComponentType, np.ndarray]: """ Create the output data that the user can use. always returns batch type output data. Use reduce_output_data to flatten to single scenario output if applicable. diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py index b3268b28a..82b110658 100644 --- a/src/power_grid_model/core/power_grid_dataset.py +++ b/src/power_grid_model/core/power_grid_dataset.py @@ -26,7 +26,7 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_meta import DatasetMetaData, power_grid_meta_data -from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType +from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.errors import PowerGridError @@ -83,7 +83,7 @@ def n_components(self) -> int: """ return pgc.dataset_info_n_components(self._info) - def components(self) -> List[PowerGridComponent]: + def components(self) -> List[ComponentType]: """ The components in the dataset. @@ -92,7 +92,7 @@ def components(self) -> List[PowerGridComponent]: """ return [pgc.dataset_info_component_name(self._info, idx) for idx in range(self.n_components())] - def elements_per_scenario(self) -> Mapping[PowerGridComponent, int]: + def elements_per_scenario(self) -> Mapping[ComponentType, int]: """ The number of elements per scenario in the dataset. @@ -105,7 +105,7 @@ def elements_per_scenario(self) -> Mapping[PowerGridComponent, int]: for idx, component_name in enumerate(self.components()) } - def total_elements(self) -> Mapping[PowerGridComponent, int]: + def total_elements(self) -> Mapping[ComponentType, int]: """ The total number of elements in the dataset. @@ -120,9 +120,7 @@ def total_elements(self) -> Mapping[PowerGridComponent, int]: } -def get_dataset_type( - data: Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]] -) -> PowerGridDataType: +def get_dataset_type(data: Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]]) -> DataType: """ Deduce the dataset type from the provided dataset. @@ -177,7 +175,7 @@ class CMutableDataset: The dataset will create mutable buffers that the Power Grid Model can use to load data. 
""" - _dataset_type: PowerGridDataType + _dataset_type: DataType _schema: DatasetMetaData _is_batch: bool _batch_size: int @@ -187,10 +185,10 @@ class CMutableDataset: def __new__( cls, data: Union[ - Mapping[PowerGridComponent, np.ndarray], - Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + Mapping[ComponentType, np.ndarray], + Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[PowerGridDataType] = None, + dataset_type: Optional[DataType] = None, ): instance = super().__new__(cls) instance._mutable_dataset = MutableDatasetPtr() @@ -247,8 +245,8 @@ def get_buffer_views(self) -> List[CBuffer]: def _add_data( self, data: Union[ - Mapping[PowerGridComponent, np.ndarray], - Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + Mapping[ComponentType, np.ndarray], + Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], ): """ @@ -267,7 +265,7 @@ def _add_data( def _add_component_data( self, - component: PowerGridComponent, + component: ComponentType, data: Union[np.ndarray, Mapping[str, np.ndarray]], allow_unknown: bool = False, ): @@ -335,10 +333,10 @@ class CConstDataset: def __new__( cls, data: Union[ - Mapping[PowerGridComponent, np.ndarray], - Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + Mapping[ComponentType, np.ndarray], + Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[PowerGridDataType] = None, + dataset_type: Optional[DataType] = None, ): instance = super().__new__(cls) instance._const_dataset = ConstDatasetPtr() @@ -391,7 +389,7 @@ def __init__(self, dataset_ptr: WritableDatasetPtr): self._schema = power_grid_meta_data[self._dataset_type] self._component_buffer_properties = self._get_buffer_properties(info) - self._data: Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]] = {} + self._data: Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]] = {} self._buffers: Mapping[str, CBuffer] = {} self._add_buffers() @@ -415,7 +413,7 @@ def get_info(self) -> CDatasetInfo: """ return CDatasetInfo(pgc.dataset_writable_get_info(self._writable_dataset)) - def get_data(self) -> Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]: + def get_data(self) -> Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]: """ Retrieve data from the Power Grid Model dataset. @@ -426,7 +424,7 @@ def get_data(self) -> Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.nd """ return self._data - def get_component_data(self, component: PowerGridComponent) -> Union[np.ndarray, Mapping[str, np.ndarray]]: + def get_component_data(self, component: ComponentType) -> Union[np.ndarray, Mapping[str, np.ndarray]]: """ Retrieve Power Grid Model data from the dataset for a specific component. 
@@ -442,20 +440,20 @@ def _add_buffers(self): for component, buffer_properties in self._component_buffer_properties.items(): self._add_buffer(component, buffer_properties) - def _add_buffer(self, component: PowerGridComponent, buffer_properties: BufferProperties): + def _add_buffer(self, component: ComponentType, buffer_properties: BufferProperties): schema = self._schema[component] self._data[component] = create_buffer(buffer_properties, schema) self._register_buffer(component, get_buffer_view(self._data[component], schema)) - def _register_buffer(self, component: PowerGridComponent, buffer: CBuffer): + def _register_buffer(self, component: ComponentType, buffer: CBuffer): pgc.dataset_writable_set_buffer( dataset=self._writable_dataset, component=component, indptr=buffer.indptr, data=buffer.data ) assert_no_error() @staticmethod - def _get_buffer_properties(info: CDatasetInfo) -> Mapping[PowerGridComponent, BufferProperties]: + def _get_buffer_properties(info: CDatasetInfo) -> Mapping[ComponentType, BufferProperties]: is_batch = info.is_batch() batch_size = info.batch_size() components = info.components() diff --git a/src/power_grid_model/core/power_grid_meta.py b/src/power_grid_model/core/power_grid_meta.py index 0f45d93d9..c85477d3a 100644 --- a/src/power_grid_model/core/power_grid_meta.py +++ b/src/power_grid_model/core/power_grid_meta.py @@ -13,7 +13,7 @@ import numpy as np from power_grid_model.core.power_grid_core import AttributePtr, ComponentPtr, DatasetPtr, power_grid_core as pgc -from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType +from power_grid_model.dataset_definitions import ComponentType, DataType # constant enum for ctype @@ -61,8 +61,8 @@ def __getitem__(self, item): return getattr(self, item) -DatasetMetaData = Dict[PowerGridComponent, ComponentMetaData] -PowerGridMetaData = Dict[PowerGridDataType, DatasetMetaData] +DatasetMetaData = Dict[ComponentType, ComponentMetaData] +PowerGridMetaData = Dict[DataType, DatasetMetaData] def _generate_meta_data() -> PowerGridMetaData: @@ -158,7 +158,7 @@ def _generate_meta_attributes(component: ComponentPtr) -> dict: def initialize_array( - data_type: PowerGridDataType, component_type: PowerGridComponent, shape: Union[tuple, int], empty: bool = False + data_type: DataType, component_type: ComponentType, shape: Union[tuple, int], empty: bool = False ) -> np.ndarray: """ Initializes an array for use in Power Grid Model calculations diff --git a/src/power_grid_model/core/power_grid_model.py b/src/power_grid_model/core/power_grid_model.py index 3d07586b0..3dada181e 100644 --- a/src/power_grid_model/core/power_grid_model.py +++ b/src/power_grid_model/core/power_grid_model.py @@ -21,7 +21,7 @@ from power_grid_model.core.index_integer import IdNp, IdxNp from power_grid_model.core.options import Options from power_grid_model.core.power_grid_core import ConstDatasetPtr, IDPtr, IdxPtr, ModelPtr, power_grid_core as pgc -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType from power_grid_model.enum import ( CalculationMethod, CalculationType, @@ -37,7 +37,7 @@ class PowerGridModel: """ _model_ptr: ModelPtr - _all_component_count: Optional[Dict[PowerGridComponent, int]] + _all_component_count: Optional[Dict[ComponentType, int]] _batch_error: Optional[PowerGridBatchError] @property @@ -57,7 +57,7 @@ def _model(self): return self._model_ptr @property - def all_component_count(self) -> Dict[PowerGridComponent, int]: + def 
all_component_count(self) -> Dict[ComponentType, int]: """ Get count of number of elements per component type. If the count for a component type is zero, it will not be in the returned dictionary. @@ -94,7 +94,7 @@ def __new__(cls, *_args, **_kwargs): instance._all_component_count = None return instance - def __init__(self, input_data: Dict[PowerGridComponent, np.ndarray], system_frequency: float = 50.0): + def __init__(self, input_data: Dict[ComponentType, np.ndarray], system_frequency: float = 50.0): """ Initialize the model from an input data set. @@ -115,7 +115,7 @@ def __init__(self, input_data: Dict[PowerGridComponent, np.ndarray], system_freq assert_no_error() self._all_component_count = {k: v for k, v in prepared_input.get_info().total_elements().items() if v > 0} - def update(self, *, update_data: Dict[PowerGridComponent, np.ndarray]): + def update(self, *, update_data: Dict[ComponentType, np.ndarray]): """ Update the model with changes. @@ -132,7 +132,7 @@ def update(self, *, update_data: Dict[PowerGridComponent, np.ndarray]): pgc.update_model(self._model, prepared_update.get_dataset_ptr()) assert_no_error() - def get_indexer(self, component_type: PowerGridComponent, ids: np.ndarray): + def get_indexer(self, component_type: ComponentType, ids: np.ndarray): """ Get array of indexers given array of ids for component type @@ -159,7 +159,7 @@ def _get_output_component_count(self, calculation_type: CalculationType): CalculationType.short_circuit: ["sensor"], }.get(calculation_type, []) - def include_type(component_type: PowerGridComponent): + def include_type(component_type: ComponentType): for exclude_type in exclude_types: if exclude_type == component_type: return False @@ -170,12 +170,12 @@ def include_type(component_type: PowerGridComponent): # pylint: disable=too-many-arguments def _construct_output( self, - output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]], + output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]], calculation_type: CalculationType, symmetric: bool, is_batch: bool, batch_size: int, - ) -> Dict[PowerGridComponent, np.ndarray]: + ) -> Dict[ComponentType, np.ndarray]: all_component_count = self._get_output_component_count(calculation_type=calculation_type) # limit all component count to user specified component types in output @@ -218,8 +218,8 @@ def _calculate_impl( self, calculation_type: CalculationType, symmetric: bool, - update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]], - output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]], + update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]], + output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]], options: Options, continue_on_batch_error: bool, decode_error: bool, @@ -283,9 +283,9 @@ def _calculate_power_flow( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.newton_raphson, - update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, + output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, 
tap_changing_strategy: Union[TapChangingStrategy, str] = TapChangingStrategy.disabled, @@ -319,13 +319,13 @@ def _calculate_state_estimation( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iterative_linear, - update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, + output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, experimental_features: Union[_ExperimentalFeatures, str] = _ExperimentalFeatures.disabled, - ) -> Dict[PowerGridComponent, np.ndarray]: + ) -> Dict[ComponentType, np.ndarray]: calculation_type = CalculationType.state_estimation options = self._options( calculation_type=calculation_type, @@ -350,14 +350,14 @@ def _calculate_short_circuit( self, *, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iec60909, - update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, + output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, short_circuit_voltage_scaling: Union[ShortCircuitVoltageScaling, str] = ShortCircuitVoltageScaling.maximum, experimental_features: Union[_ExperimentalFeatures, str] = _ExperimentalFeatures.disabled, - ) -> Dict[PowerGridComponent, np.ndarray]: + ) -> Dict[ComponentType, np.ndarray]: calculation_type = CalculationType.short_circuit symmetric = False @@ -386,13 +386,13 @@ def calculate_power_flow( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.newton_raphson, - update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, + output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, tap_changing_strategy: Union[TapChangingStrategy, str] = TapChangingStrategy.disabled, - ) -> Dict[PowerGridComponent, np.ndarray]: + ) -> Dict[ComponentType, np.ndarray]: """ Calculate power flow once with the current model attributes. Or calculate in batch with the given update dataset in batch. 
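A minimal end-to-end sketch of the typed calling convention. The grid values mirror the quick example scripts; only the node component is requested via output_component_types, and the returned dict is keyed by ComponentType as well.

from typing import Dict

import numpy as np

from power_grid_model import LoadGenType, PowerGridModel, initialize_array
from power_grid_model.dataset_definitions import ComponentType

node = initialize_array("input", "node", 2)
node["id"] = [1, 2]
node["u_rated"] = [10.5e3, 10.5e3]

line = initialize_array("input", "line", 1)
line["id"] = [3]
line["from_node"] = [1]
line["to_node"] = [2]
line["from_status"] = [1]
line["to_status"] = [1]
line["r1"] = [0.25]
line["x1"] = [0.2]
line["c1"] = [10e-6]
line["tan1"] = [0.0]

sym_load = initialize_array("input", "sym_load", 1)
sym_load["id"] = [4]
sym_load["node"] = [2]
sym_load["status"] = [1]
sym_load["type"] = [LoadGenType.const_power]
sym_load["p_specified"] = [2e6]
sym_load["q_specified"] = [0.5e6]

source = initialize_array("input", "source", 1)
source["id"] = [5]
source["node"] = [1]
source["status"] = [1]
source["u_ref"] = [1.0]

input_data: Dict[ComponentType, np.ndarray] = {
    "node": node,
    "line": line,
    "sym_load": sym_load,
    "source": source,
}

model = PowerGridModel(input_data, system_frequency=50.0)

# Restrict the output to the node component only.
result = model.calculate_power_flow(output_component_types={"node"})
print(result["node"]["u_pu"])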
@@ -475,12 +475,12 @@ def calculate_state_estimation( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iterative_linear, - update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, + output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, - ) -> Dict[PowerGridComponent, np.ndarray]: + ) -> Dict[ComponentType, np.ndarray]: """ Calculate state estimation once with the current model attributes. Or calculate in batch with the given update dataset in batch. @@ -556,13 +556,13 @@ def calculate_short_circuit( self, *, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iec60909, - update_data: Optional[Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, threading: int = -1, - output_component_types: Optional[Union[Set[PowerGridComponent], List[PowerGridComponent]]] = None, + output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, short_circuit_voltage_scaling: Union[ShortCircuitVoltageScaling, str] = ShortCircuitVoltageScaling.maximum, - ) -> Dict[PowerGridComponent, np.ndarray]: + ) -> Dict[ComponentType, np.ndarray]: """ Calculate a short circuit once with the current model attributes. Or calculate in batch with the given update dataset in batch diff --git a/src/power_grid_model/core/serialization.py b/src/power_grid_model/core/serialization.py index d2adc63eb..f1d526042 100644 --- a/src/power_grid_model/core/serialization.py +++ b/src/power_grid_model/core/serialization.py @@ -23,7 +23,7 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_dataset import CConstDataset, CWritableDataset -from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType +from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.errors import PowerGridSerializationError @@ -64,7 +64,7 @@ def __del__(self): if hasattr(self, "_deserializer"): pgc.destroy_deserializer(self._deserializer) - def load(self) -> Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]: + def load(self) -> Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]: """ Load the deserialized data to a new dataset. 
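For the serialization entry points in this file, a round trip through the JSON format might look as follows; a single-component dataset with invented values keeps the sketch short.

from power_grid_model import initialize_array
from power_grid_model.utils import json_deserialize, json_serialize

node = initialize_array("input", "node", 1)
node["id"] = [1]
node["u_rated"] = [10.5e3]

# Serialize a single input dataset to the JSON string format and read it
# back; the deserialized dict is keyed by ComponentType again.
serialized = json_serialize({"node": node}, dataset_type="input")
roundtrip = json_deserialize(serialized)
print(roundtrip["node"]["u_rated"])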
@@ -85,8 +85,8 @@ class Serializer(ABC): """ _data: Union[ - Mapping[PowerGridComponent, np.ndarray], - Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + Mapping[ComponentType, np.ndarray], + Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ] _dataset: CConstDataset _serializer: SerializerPtr @@ -94,11 +94,11 @@ class Serializer(ABC): def __new__( cls, data: Union[ - Mapping[PowerGridComponent, np.ndarray], - Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + Mapping[ComponentType, np.ndarray], + Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], serialization_type: SerializationType, - dataset_type: Optional[PowerGridDataType] = None, + dataset_type: Optional[DataType] = None, ): instance = super().__new__(cls) @@ -205,10 +205,10 @@ class JsonSerializer(_StringSerializer): # pylint: disable=too-few-public-metho def __new__( cls, data: Union[ - Mapping[PowerGridComponent, np.ndarray], - Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + Mapping[ComponentType, np.ndarray], + Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[PowerGridDataType] = None, + dataset_type: Optional[DataType] = None, ): return super().__new__(cls, data, SerializationType.JSON, dataset_type=dataset_type) @@ -221,15 +221,15 @@ class MsgpackSerializer(_BytesSerializer): # pylint: disable=too-few-public-met def __new__( cls, data: Union[ - Mapping[PowerGridComponent, np.ndarray], - Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + Mapping[ComponentType, np.ndarray], + Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[PowerGridDataType] = None, + dataset_type: Optional[DataType] = None, ): return super().__new__(cls, data, SerializationType.MSGPACK, dataset_type=dataset_type) -def json_deserialize(data: Union[str, bytes]) -> Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]: +def json_deserialize(data: Union[str, bytes]) -> Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]: """ Load serialized JSON data to a new dataset. @@ -250,10 +250,10 @@ def json_deserialize(data: Union[str, bytes]) -> Dict[PowerGridComponent, Union[ def json_serialize( data: Union[ - Mapping[PowerGridComponent, np.ndarray], - Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + Mapping[ComponentType, np.ndarray], + Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[PowerGridDataType] = None, + dataset_type: Optional[DataType] = None, use_compact_list: bool = False, indent: int = 2, ) -> str: @@ -282,7 +282,7 @@ def json_serialize( return result -def msgpack_deserialize(data: bytes) -> Dict[PowerGridComponent, Union[np.ndarray, Dict[str, np.ndarray]]]: +def msgpack_deserialize(data: bytes) -> Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]: """ Load serialized msgpack data to a new dataset. 
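The msgpack counterpart mirrors the JSON round trip but works on bytes; a minimal sketch with the same invented one-node dataset:

from power_grid_model import initialize_array
from power_grid_model.utils import msgpack_deserialize, msgpack_serialize

node = initialize_array("input", "node", 1)
node["id"] = [1]
node["u_rated"] = [10.5e3]

# msgpack_serialize returns bytes rather than a JSON string.
blob = msgpack_serialize({"node": node}, dataset_type="input")
restored = msgpack_deserialize(blob)
print(restored["node"]["id"])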
@@ -303,10 +303,10 @@ def msgpack_deserialize(data: bytes) -> Dict[PowerGridComponent, Union[np.ndarra def msgpack_serialize( data: Union[ - Mapping[PowerGridComponent, np.ndarray], - Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + Mapping[ComponentType, np.ndarray], + Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[PowerGridDataType] = None, + dataset_type: Optional[DataType] = None, use_compact_list: bool = False, ) -> bytes: """ diff --git a/src/power_grid_model/data_types.py b/src/power_grid_model/data_types.py index 69b1cdadd..86e94de18 100644 --- a/src/power_grid_model/data_types.py +++ b/src/power_grid_model/data_types.py @@ -11,7 +11,7 @@ import numpy as np -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType # When we're dropping python 3.8, we should introduce proper NumPy type hinting @@ -62,7 +62,7 @@ A data array can be a :class:`SingleArray` or a :class:`BatchArray`. """ -SingleDataset = Dict[PowerGridComponent, SingleArray] +SingleDataset = Dict[ComponentType, SingleArray] """ A single dataset is a dictionary where the keys are the component types and the values are :class:`SingleArray` @@ -70,7 +70,7 @@ - Example: {"node": :class:`SingleArray`, "line": :class:`SingleArray`} """ -BatchDataset = Dict[PowerGridComponent, BatchArray] +BatchDataset = Dict[ComponentType, BatchArray] """ A batch dataset is a dictionary where the keys are the component types and the values are :class:`BatchArray` @@ -144,7 +144,7 @@ - Example: [{"id": 1, "u_rated": 10500.0}, {"id": 2, "u_rated": 10500.0}] """ -SinglePythonDataset = Dict[PowerGridComponent, ComponentList] +SinglePythonDataset = Dict[ComponentType, ComponentList] """ A single dataset in native python representation is a dictionary, where the keys are the component names and the values are a list of all the instances of such a component. In essence it stores the same information as a diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py index f9f2687ff..2c401ad83 100644 --- a/src/power_grid_model/dataset_definitions.py +++ b/src/power_grid_model/dataset_definitions.py @@ -23,7 +23,7 @@ # pylint: disable=invalid-name -class PowerGridDataTypes(Enum): +class DataTypes(Enum): """Dataset types.""" input = "input" @@ -37,7 +37,7 @@ class PowerGridDataTypes(Enum): sc_output = "sc_output" -PowerGridDataTypesLiteral = Literal[ +DataTypesLiteral = Literal[ "input", "sym_output", "asym_output", @@ -46,18 +46,18 @@ class PowerGridDataTypes(Enum): ] -PowerGridDataType = Union[PowerGridDataTypes, PowerGridDataTypesLiteral] +DataType = Union[DataTypes, DataTypesLiteral] """ -A PowerGridDataType is the type of a :class:`BatchDataset`. +A DataType is the type of a :class:`BatchDataset`. 
- Examples: - - PowerGridDataType.input = "input" - - PowerGridDataType.update = "update" + - DataType.input = "input" + - DataType.update = "update" """ -class PowerGridComponents(Enum): +class ComponentTypes(Enum): """Grid component types.""" node = "node" @@ -95,7 +95,7 @@ class PowerGridComponents(Enum): fault = "fault" -PowerGridComponentsLiteral = Literal[ +ComponentTypesLiteral = Literal[ "node", "line", "link", @@ -116,14 +116,14 @@ class PowerGridComponents(Enum): ] -PowerGridComponent = Union[PowerGridComponents, PowerGridComponentsLiteral] +ComponentType = Union[ComponentTypes, ComponentTypesLiteral] """ -A PowerGridComponent is the type of a grid component. +A ComponentType is the type of a grid component. - Examples: - - PowerGridComponent.node = "node" - - PowerGridComponent.line = "line" + - ComponentType.node = "node" + - ComponentType.line = "line" """ # pylint: enable=invalid-name diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 6aa75eb5b..846af6e1a 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -25,7 +25,7 @@ msgpack_serialize, ) from power_grid_model.data_types import BatchArray, BatchDataset, Dataset, SingleDataset -from power_grid_model.dataset_definitions import PowerGridDataType +from power_grid_model.dataset_definitions import DataType from power_grid_model.errors import PowerGridSerializationError _DEPRECATED_FUNCTION_MSG = "This function is deprecated." @@ -108,7 +108,7 @@ def json_deserialize_from_file(file_path: Path) -> Dataset: def json_serialize_to_file( file_path: Path, data: Dataset, - dataset_type: Optional[PowerGridDataType] = None, + dataset_type: Optional[DataType] = None, use_compact_list: bool = False, indent: Optional[int] = 2, ): @@ -151,7 +151,7 @@ def msgpack_deserialize_from_file(file_path: Path) -> Dataset: def msgpack_serialize_to_file( - file_path: Path, data: Dataset, dataset_type: Optional[PowerGridDataType] = None, use_compact_list: bool = False + file_path: Path, data: Dataset, dataset_type: Optional[DataType] = None, use_compact_list: bool = False ): """ Export msgpack data in most recent format. @@ -281,7 +281,7 @@ def import_update_data(json_file: Path) -> BatchDataset: ) -def _compatibility_deprecated_import_json_data(json_file: Path, data_type: PowerGridDataType): +def _compatibility_deprecated_import_json_data(json_file: Path, data_type: DataType): with open(json_file, mode="r", encoding="utf-8") as file_pointer: data = json.load(file_pointer) diff --git a/src/power_grid_model/validation/errors.py b/src/power_grid_model/validation/errors.py index fb08989c1..e46f3cb28 100644 --- a/src/power_grid_model/validation/errors.py +++ b/src/power_grid_model/validation/errors.py @@ -10,7 +10,7 @@ from enum import Enum from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Type, Union -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType class ValidationError(ABC): @@ -32,18 +32,18 @@ class ValidationError(ABC): """ - component: Optional[Union[PowerGridComponent, List[PowerGridComponent]]] = None + component: Optional[Union[ComponentType, List[ComponentType]]] = None """ The component, or components, to which the error applies. """ - field: Optional[Union[str, List[str], List[Tuple[PowerGridComponent, str]]]] = None + field: Optional[Union[str, List[str], List[Tuple[ComponentType, str]]]] = None """ The field, or fields, to which the error applies. 
A field can also be a tuple (component, field) when multiple components are being addressed. """ - ids: Optional[Union[List[int], List[Tuple[PowerGridComponent, int]]]] = None + ids: Optional[Union[List[int], List[Tuple[ComponentType, int]]]] = None """ The object identifiers to which the error applies. A field object identifier can also be a tuple (component, id) when multiple components are being addressed. @@ -115,11 +115,11 @@ class SingleFieldValidationError(ValidationError): """ _message = "Field {field} is not valid for {n} {objects}." - component: PowerGridComponent + component: ComponentType field: str ids: List[int] - def __init__(self, component: PowerGridComponent, field: str, ids: Iterable[int]): + def __init__(self, component: ComponentType, field: str, ids: Iterable[int]): """ Args: component: Component name @@ -137,11 +137,11 @@ class MultiFieldValidationError(ValidationError): """ _message = "Combination of fields {field} is not valid for {n} {objects}." - component: PowerGridComponent + component: ComponentType field: List[str] ids: List[int] - def __init__(self, component: PowerGridComponent, fields: List[str], ids: List[int]): + def __init__(self, component: ComponentType, fields: List[str], ids: List[int]): """ Args: component: Component name @@ -167,12 +167,12 @@ class MultiComponentValidationError(ValidationError): E.g. the two fields `id` fields of the `node` and `line` component: [('node', 'id'), ('line', 'id')]. """ - component: List[PowerGridComponent] - field: List[Tuple[PowerGridComponent, str]] - ids: List[Tuple[PowerGridComponent, int]] + component: List[ComponentType] + field: List[Tuple[ComponentType, str]] + ids: List[Tuple[ComponentType, int]] _message = "Fields {field} are not valid for {n} {objects}." - def __init__(self, fields: List[Tuple[PowerGridComponent, str]], ids: List[Tuple[PowerGridComponent, int]]): + def __init__(self, fields: List[Tuple[ComponentType, str]], ids: List[Tuple[ComponentType, int]]): """ Args: fields: List of field names, formatted as tuples (component, field) @@ -208,7 +208,7 @@ class NotIdenticalError(SingleFieldValidationError): unique: Set[Any] num_unique: int - def __init__(self, component: PowerGridComponent, field: str, ids: Iterable[int], values: List[Any]): + def __init__(self, component: ComponentType, field: str, ids: Iterable[int], values: List[Any]): super().__init__(component, field, ids) self.values = values self.unique = set(self.values) @@ -242,9 +242,7 @@ class InvalidEnumValueError(SingleFieldValidationError): _message = "Field {field} contains invalid {enum} values for {n} {objects}." enum: Union[Type[Enum], List[Type[Enum]]] - def __init__( - self, component: PowerGridComponent, field: str, ids: List[int], enum: Union[Type[Enum], List[Type[Enum]]] - ): + def __init__(self, component: ComponentType, field: str, ids: List[int], enum: Union[Type[Enum], List[Type[Enum]]]): super().__init__(component, field, ids) self.enum = enum @@ -297,7 +295,7 @@ class IdNotInDatasetError(SingleFieldValidationError): _message = "ID does not exist in {ref_dataset} for {n} {objects}." ref_dataset: str - def __init__(self, component: PowerGridComponent, ids: List[int], ref_dataset: str): + def __init__(self, component: ComponentType, ids: List[int], ref_dataset: str): super().__init__(component=component, field="id", ids=ids) self.ref_dataset = ref_dataset @@ -322,14 +320,14 @@ class InvalidIdError(SingleFieldValidationError): """ _message = "Field {field} does not contain a valid {ref_components} id for {n} {objects}. 
{filters}" - ref_components: List[PowerGridComponent] + ref_components: List[ComponentType] def __init__( self, - component: PowerGridComponent, + component: ComponentType, field: str, ids: List[int], - ref_components: Union[PowerGridComponent, List[PowerGridComponent]], + ref_components: Union[ComponentType, List[ComponentType]], filters: Optional[Dict[str, Any]] = None, ): # pylint: disable=too-many-arguments @@ -377,7 +375,7 @@ class ComparisonError(SingleFieldValidationError): RefType = Union[int, float, str, Tuple[Union[int, float, str], ...]] - def __init__(self, component: PowerGridComponent, field: str, ids: List[int], ref_value: "ComparisonError.RefType"): + def __init__(self, component: ComponentType, field: str, ids: List[int], ref_value: "ComparisonError.RefType"): super().__init__(component, field, ids) self.ref_value = ref_value @@ -485,7 +483,7 @@ class InvalidAssociatedEnumValueError(MultiFieldValidationError): def __init__( self, - component: PowerGridComponent, + component: ComponentType, fields: List[str], ids: List[int], enum: Union[Type[Enum], List[Type[Enum]]], diff --git a/src/power_grid_model/validation/rules.py b/src/power_grid_model/validation/rules.py index d8ab0b3eb..2e1438376 100644 --- a/src/power_grid_model/validation/rules.py +++ b/src/power_grid_model/validation/rules.py @@ -23,7 +23,7 @@ data: SingleDataset The entire input/update data set - component: PowerGridComponent + component: ComponentType The name of the component, which should be an existing key in the data field: str @@ -40,7 +40,7 @@ import numpy as np from power_grid_model.data_types import SingleDataset -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType from power_grid_model.enum import FaultPhase, FaultType, WindingType from power_grid_model.validation.errors import ( ComparisonError, @@ -81,14 +81,14 @@ CompError = TypeVar("CompError", bound=ComparisonError) -def all_greater_than_zero(data: SingleDataset, component: PowerGridComponent, field: str) -> List[NotGreaterThanError]: +def all_greater_than_zero(data: SingleDataset, component: ComponentType, field: str) -> List[NotGreaterThanError]: """ Check that for all records of a particular type of component, the values in the 'field' column are greater than zero. Returns an empty list on success, or a list containing a single error object on failure. Args: data (SingleDataset): The input/update data set for all components - component (PowerGridComponent): The component of interest + component (ComponentType): The component of interest field (str): The field of interest Returns: @@ -100,7 +100,7 @@ def all_greater_than_zero(data: SingleDataset, component: PowerGridComponent, fi def all_greater_than_or_equal_to_zero( data: SingleDataset, - component: PowerGridComponent, + component: ComponentType, field: str, default_value: Optional[Union[np.ndarray, int, float]] = None, ) -> List[NotGreaterOrEqualError]: @@ -110,7 +110,7 @@ def all_greater_than_or_equal_to_zero( Args: data (SingleDataset): The input/update data set for all components - component (PowerGridComponent) The component of interest + component (ComponentType) The component of interest field (str): The field of interest default_value (Optional[Union[np.ndarray, int, float]], optional): Some values are not required, but will receive a default value in the C++ core. 
To do a proper input validation, these default values should be @@ -125,7 +125,7 @@ def all_greater_than_or_equal_to_zero( def all_greater_than( - data: SingleDataset, component: PowerGridComponent, field: str, ref_value: Union[int, float, str] + data: SingleDataset, component: ComponentType, field: str, ref_value: Union[int, float, str] ) -> List[NotGreaterThanError]: """ Check that for all records of a particular type of component, the values in the 'field' column are greater than @@ -152,7 +152,7 @@ def not_greater(val: np.ndarray, *ref: np.ndarray): def all_greater_or_equal( data: SingleDataset, - component: PowerGridComponent, + component: ComponentType, field: str, ref_value: Union[int, float, str], default_value: Optional[Union[np.ndarray, int, float]] = None, @@ -188,7 +188,7 @@ def not_greater_or_equal(val: np.ndarray, *ref: np.ndarray): def all_less_than( - data: SingleDataset, component: PowerGridComponent, field: str, ref_value: Union[int, float, str] + data: SingleDataset, component: ComponentType, field: str, ref_value: Union[int, float, str] ) -> List[NotLessThanError]: """ Check that for all records of a particular type of component, the values in the 'field' column are less than the @@ -214,7 +214,7 @@ def not_less(val: np.ndarray, *ref: np.ndarray): def all_less_or_equal( - data: SingleDataset, component: PowerGridComponent, field: str, ref_value: Union[int, float, str] + data: SingleDataset, component: ComponentType, field: str, ref_value: Union[int, float, str] ) -> List[NotLessOrEqualError]: """ Check that for all records of a particular type of component, the values in the 'field' column are less than, @@ -243,7 +243,7 @@ def not_less_or_equal(val: np.ndarray, *ref: np.ndarray): def all_between( # pylint: disable=too-many-arguments data: SingleDataset, - component: PowerGridComponent, + component: ComponentType, field: str, ref_value_1: Union[int, float, str], ref_value_2: Union[int, float, str], @@ -283,7 +283,7 @@ def outside(val: np.ndarray, *ref: np.ndarray) -> np.ndarray: def all_between_or_at( # pylint: disable=too-many-arguments data: SingleDataset, - component: PowerGridComponent, + component: ComponentType, field: str, ref_value_1: Union[int, float, str], ref_value_2: Union[int, float, str], @@ -333,7 +333,7 @@ def outside(val: np.ndarray, *ref: np.ndarray) -> np.ndarray: def none_match_comparison( data: SingleDataset, - component: PowerGridComponent, + component: ComponentType, field: str, compare_fn: Callable, ref_value: ComparisonError.RefType, @@ -384,7 +384,7 @@ def none_match_comparison( return [] -def all_identical(data: SingleDataset, component: PowerGridComponent, field: str) -> List[NotIdenticalError]: +def all_identical(data: SingleDataset, component: ComponentType, field: str) -> List[NotIdenticalError]: """ Check that for all records of a particular type of component, the values in the 'field' column are identical. @@ -408,7 +408,7 @@ def all_identical(data: SingleDataset, component: PowerGridComponent, field: str def all_enabled_identical( - data: SingleDataset, component: PowerGridComponent, field: str, status_field: str + data: SingleDataset, component: ComponentType, field: str, status_field: str ) -> List[NotIdenticalError]: """ Check that for all records of a particular type of component, the values in the 'field' column are identical. 
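A tiny self-contained example of the (data, component, field) calling convention shared by these rules; the node values are deliberately invalid so the rule has something to report.

from power_grid_model import initialize_array
from power_grid_model.validation.rules import all_greater_than_zero

node = initialize_array("input", "node", 3)
node["id"] = [1, 2, 3]
node["u_rated"] = [10.5e3, 0.0, -10.5e3]  # two deliberately invalid values

# component accepts a ComponentType; plain strings keep working through
# the Literal side of the Union.
errors = all_greater_than_zero(data={"node": node}, component="node", field="u_rated")
print(errors)  # expect one NotGreaterThanError listing ids 2 and 3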
@@ -436,7 +436,7 @@ def all_enabled_identical( ) -def all_unique(data: SingleDataset, component: PowerGridComponent, field: str) -> List[NotUniqueError]: +def all_unique(data: SingleDataset, component: ComponentType, field: str) -> List[NotUniqueError]: """ Check that for all records of a particular type of component, the values in the 'field' column are unique within the 'field' column of that component. @@ -460,7 +460,7 @@ def all_unique(data: SingleDataset, component: PowerGridComponent, field: str) - def all_cross_unique( - data: SingleDataset, fields: List[Tuple[PowerGridComponent, str]], cross_only=True + data: SingleDataset, fields: List[Tuple[ComponentType, str]], cross_only=True ) -> List[MultiComponentNotUniqueError]: """ Check that for all records of a particular type of component, the values in the 'field' column are unique within @@ -477,7 +477,7 @@ def all_cross_unique( A list containing zero or one MultiComponentNotUniqueError, listing all fields and ids where the value was not unique between the fields. """ - all_values: Dict[int, List[Tuple[Tuple[PowerGridComponent, str], int]]] = {} + all_values: Dict[int, List[Tuple[Tuple[ComponentType, str], int]]] = {} duplicate_ids = set() for component, field in fields: for obj_id, value in zip(data[component]["id"], data[component][field]): @@ -496,7 +496,7 @@ def all_cross_unique( def all_valid_enum_values( - data: SingleDataset, component: PowerGridComponent, field: str, enum: Union[Type[Enum], List[Type[Enum]]] + data: SingleDataset, component: ComponentType, field: str, enum: Union[Type[Enum], List[Type[Enum]]] ) -> List[InvalidEnumValueError]: """ Check that for all records of a particular type of component, the values in the 'field' column are valid values for @@ -504,7 +504,7 @@ def all_valid_enum_values( Args: data (SingleDataset): The input/update data set for all components - component (PowerGridComponent): The component of interest + component (ComponentType): The component of interest field (str): The field of interest enum (Type[Enum] | List[Type[Enum]]): The enum type to validate against, or a list of such enum types @@ -527,20 +527,20 @@ def all_valid_enum_values( def all_valid_associated_enum_values( # pylint: disable=too-many-arguments data: SingleDataset, - component: PowerGridComponent, + component: ComponentType, field: str, ref_object_id_field: str, - ref_components: List[PowerGridComponent], + ref_components: List[ComponentType], enum: Union[Type[Enum], List[Type[Enum]]], **filters: Any, ) -> List[InvalidAssociatedEnumValueError]: """ Args: data (SingleDataset): The input/update data set for all components - component (PowerGridComponent): The component of interest + component (ComponentType): The component of interest field (str): The field of interest ref_object_id_field (str): The field that contains the referenced component ids - ref_components (List[PowerGridComponent]): The component or components in which we want to look for ids + ref_components (List[ComponentType]): The component or components in which we want to look for ids enum (Type[Enum] | List[Type[Enum]]): The enum type to validate against, or a list of such enum types **filters: One or more filters on the dataset. E.g. regulated_object="transformer". 
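all_valid_enum_values follows the same pattern; a sketch with one intentionally invalid enum value (the ids are made up for the example).

from power_grid_model import LoadGenType, initialize_array
from power_grid_model.validation.rules import all_valid_enum_values

sym_load = initialize_array("input", "sym_load", 2)
sym_load["id"] = [10, 11]
sym_load["type"] = [LoadGenType.const_power, 99]  # 99 is not a valid LoadGenType

errors = all_valid_enum_values(
    data={"sym_load": sym_load}, component="sym_load", field="type", enum=LoadGenType
)
print(errors)  # expect one InvalidEnumValueError for id 11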
@@ -569,9 +569,9 @@ def all_valid_associated_enum_values( # pylint: disable=too-many-arguments def all_valid_ids( data: SingleDataset, - component: PowerGridComponent, + component: ComponentType, field: str, - ref_components: Union[PowerGridComponent, List[PowerGridComponent]], + ref_components: Union[ComponentType, List[ComponentType]], **filters: Any, ) -> List[InvalidIdError]: """ @@ -600,7 +600,7 @@ def all_valid_ids( return [] -def all_boolean(data: SingleDataset, component: PowerGridComponent, field: str) -> List[NotBooleanError]: +def all_boolean(data: SingleDataset, component: ComponentType, field: str) -> List[NotBooleanError]: """ Check that for all records of a particular type of component, the values in the 'field' column are valid boolean values, i.e. 0 or 1. Returns an empty list on success, or a list containing a single error object on failure. @@ -622,7 +622,7 @@ def all_boolean(data: SingleDataset, component: PowerGridComponent, field: str) def all_not_two_values_zero( - data: SingleDataset, component: PowerGridComponent, field_1: str, field_2: str + data: SingleDataset, component: ComponentType, field_1: str, field_2: str ) -> List[TwoValuesZeroError]: """ Check that for all records of a particular type of component, the values in the 'field_1' and 'field_2' column are @@ -647,7 +647,7 @@ def all_not_two_values_zero( def all_not_two_values_equal( - data: SingleDataset, component: PowerGridComponent, field_1: str, field_2: str + data: SingleDataset, component: ComponentType, field_1: str, field_2: str ) -> List[SameValueError]: """ Check that for all records of a particular type of component, the values in the 'field_1' and 'field_2' column are @@ -673,7 +673,7 @@ def all_not_two_values_equal( def all_ids_exist_in_data_set( - data: SingleDataset, ref_data: SingleDataset, component: PowerGridComponent, ref_name: str + data: SingleDataset, ref_data: SingleDataset, component: ComponentType, ref_name: str ) -> List[IdNotInDatasetError]: """ Check that for all records of a particular type of component, the ids exist in the reference data set. @@ -694,9 +694,7 @@ def all_ids_exist_in_data_set( return [] -def all_finite( - data: SingleDataset, exceptions: Optional[Dict[PowerGridComponent, List[str]]] = None -) -> List[InfinityError]: +def all_finite(data: SingleDataset, exceptions: Optional[Dict[ComponentType, List[str]]] = None) -> List[InfinityError]: """ Check that for all records in all component, the values in all columns are finite value, i.e. float values other than inf, or -inf. Nan values are ignored, as in all other comparison functions. You can use non_missing() to @@ -730,7 +728,7 @@ def all_finite( def none_missing( data: SingleDataset, - component: PowerGridComponent, + component: ComponentType, fields: Union[List[Union[str, List[str]]], str, List[str]], index: int = 0, ) -> List[MissingValueError]: @@ -767,7 +765,7 @@ def none_missing( return errors -def valid_p_q_sigma(data: SingleDataset, component: PowerGridComponent) -> List[MultiFieldValidationError]: +def valid_p_q_sigma(data: SingleDataset, component: ComponentType) -> List[MultiFieldValidationError]: """ Check validity of the pair `(p_sigma, q_sigma)` for 'sym_power_sensor' and 'asym_power_sensor'. 
@@ -800,7 +798,7 @@ def valid_p_q_sigma(data: SingleDataset, component: PowerGridComponent) -> List[ def all_valid_clocks( - data: SingleDataset, component: PowerGridComponent, clock_field: str, winding_from_field: str, winding_to_field: str + data: SingleDataset, component: ComponentType, clock_field: str, winding_from_field: str, winding_to_field: str ) -> List[TransformerClockError]: """ Custom validation rule: Odd clock number is only allowed for Dy(n) or Y(N)d configuration. @@ -838,7 +836,7 @@ def all_valid_clocks( def all_valid_fault_phases( - data: SingleDataset, component: PowerGridComponent, fault_type_field: str, fault_phase_field: str + data: SingleDataset, component: ComponentType, fault_type_field: str, fault_phase_field: str ) -> List[FaultPhaseError]: """ Custom validation rule: Only a subset of fault_phases is supported for each fault type. @@ -893,19 +891,19 @@ def _fault_phase_supported(fault_type: FaultType, fault_phase: FaultPhase): def all_supported_tap_control_side( # pylint: disable=too-many-arguments data: SingleDataset, - component: PowerGridComponent, + component: ComponentType, control_side_field: str, regulated_object_field: str, - tap_side_fields: List[Tuple[PowerGridComponent, str]], + tap_side_fields: List[Tuple[ComponentType, str]], **filters: Any, ) -> List[UnsupportedTransformerRegulationError]: """ Args: data (SingleDataset): The input/update data set for all components - component (PowerGridComponent): The component of interest + component (ComponentType): The component of interest control_side_field (str): The field of interest regulated_object_field (str): The field that contains the regulated component ids - tap_side_fields (List[Tuple[PowerGridComponent, str]]): The fields of interest per regulated component, + tap_side_fields (List[Tuple[ComponentType, str]]): The fields of interest per regulated component, formatted as [(component_1, field_1), (component_2, field_2)] **filters: One or more filters on the dataset. E.g. regulated_object="transformer". diff --git a/src/power_grid_model/validation/utils.py b/src/power_grid_model/validation/utils.py index 8f1da55b9..899ee5061 100644 --- a/src/power_grid_model/validation/utils.py +++ b/src/power_grid_model/validation/utils.py @@ -12,7 +12,7 @@ from power_grid_model import power_grid_meta_data from power_grid_model.data_types import SingleDataset -from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType +from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.validation.errors import ValidationError @@ -97,7 +97,7 @@ def update_input_data(input_data: SingleDataset, update_data: SingleDataset): return merged_data -def update_component_data(component: PowerGridComponent, input_data: np.ndarray, update_data: np.ndarray) -> None: +def update_component_data(component: ComponentType, input_data: np.ndarray, update_data: np.ndarray) -> None: """ Update the data in a numpy array, with another numpy array, indexed on the "id" field and only non-NaN values are overwritten. @@ -162,7 +162,7 @@ def errors_to_string( return msg -def nan_type(component: PowerGridComponent, field: str, data_type: PowerGridDataType = "input"): +def nan_type(component: ComponentType, field: str, data_type: DataType = "input"): """ Helper function to retrieve the nan value for a certain field as defined in the power_grid_meta_data. 
""" @@ -206,7 +206,7 @@ def get_indexer(source: np.ndarray, target: np.ndarray, default_value: Optional[ def set_default_value( - data: SingleDataset, component: PowerGridComponent, field: str, default_value: Union[int, float, np.ndarray] + data: SingleDataset, component: ComponentType, field: str, default_value: Union[int, float, np.ndarray] ): """ This function sets the default value in the data that is to be validated, so the default values are included in the @@ -233,9 +233,7 @@ def set_default_value( data[component][field][mask] = default_value -def get_valid_ids( - data: SingleDataset, ref_components: Union[PowerGridComponent, List[PowerGridComponent]] -) -> List[int]: +def get_valid_ids(data: SingleDataset, ref_components: Union[ComponentType, List[ComponentType]]) -> List[int]: """ This function returns the valid IDs specified by all ref_components @@ -265,7 +263,7 @@ def get_valid_ids( return list(valid_ids) -def get_mask(data: SingleDataset, component: PowerGridComponent, field: str, **filters: Any) -> np.ndarray: +def get_mask(data: SingleDataset, component: ComponentType, field: str, **filters: Any) -> np.ndarray: """ Get a mask based on the specified filters. E.g. measured_terminal_type=MeasuredTerminalType.source. diff --git a/src/power_grid_model/validation/validation.py b/src/power_grid_model/validation/validation.py index 32ebd5655..30620fdc6 100644 --- a/src/power_grid_model/validation/validation.py +++ b/src/power_grid_model/validation/validation.py @@ -18,7 +18,7 @@ from power_grid_model import power_grid_meta_data from power_grid_model._utils import convert_batch_dataset_to_batch_list from power_grid_model.data_types import BatchDataset, Dataset, SingleDataset -from power_grid_model.dataset_definitions import PowerGridComponent, PowerGridDataType +from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.enum import ( Branch3Side, BranchSide, @@ -152,7 +152,7 @@ def validate_batch_data( return errors if errors else None -def assert_valid_data_structure(data: Dataset, data_type: PowerGridDataType) -> None: +def assert_valid_data_structure(data: Dataset, data_type: DataType) -> None: """ Checks if all component names are valid and if the data inside the component matches the required Numpy structured array as defined in the Power Grid Model meta data. @@ -206,7 +206,7 @@ def validate_unique_ids_across_components(data: SingleDataset) -> List[MultiComp def validate_ids_exist( - update_data: Dict[PowerGridComponent, np.ndarray], input_data: SingleDataset + update_data: Dict[ComponentType, np.ndarray], input_data: SingleDataset ) -> List[IdNotInDatasetError]: """ Checks if all ids of the components in the update data exist in the input data. This needs to be true, because you @@ -229,8 +229,8 @@ def validate_ids_exist( def _process_power_sigma_and_p_q_sigma( data: SingleDataset, - sensor: PowerGridComponent, - required_list: Dict[Union[PowerGridComponent, str], List[Union[str, List[str]]]], + sensor: ComponentType, + required_list: Dict[Union[ComponentType, str], List[Union[str, List[str]]]], ) -> None: """ Helper function to process the required list when both `p_sigma` and `q_sigma` exist @@ -281,7 +281,7 @@ def validate_required_values( An empty list if all required data is available, or a list of MissingValueErrors. 
""" # Base - required: Dict[Union[PowerGridComponent, str], List[Union[str, List[str]]]] = {"base": ["id"]} + required: Dict[Union[ComponentType, str], List[Union[str, List[str]]]] = {"base": ["id"]} # Nodes required["node"] = required["base"] + ["u_rated"] @@ -497,7 +497,7 @@ def validate_values(data: SingleDataset, calculation_type: Optional[CalculationT # pylint: disable=missing-function-docstring -def validate_base(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: +def validate_base(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors: List[ValidationError] = list(all_unique(data, component, "id")) return errors @@ -508,7 +508,7 @@ def validate_node(data: SingleDataset) -> List[ValidationError]: return errors -def validate_branch(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: +def validate_branch(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_base(data, component) errors += all_valid_ids(data, component, "from_node", "node") errors += all_valid_ids(data, component, "to_node", "node") @@ -554,7 +554,7 @@ def validate_transformer(data: SingleDataset) -> List[ValidationError]: return errors -def validate_branch3(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: +def validate_branch3(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_base(data, component) errors += all_valid_ids(data, component, "node_1", "node") errors += all_valid_ids(data, component, "node_2", "node") @@ -686,7 +686,7 @@ def validate_three_winding_transformer(data: SingleDataset) -> List[ValidationEr return errors -def validate_appliance(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: +def validate_appliance(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_base(data, component) errors += all_boolean(data, component, "status") errors += all_valid_ids(data, component, "node", "node") @@ -702,7 +702,7 @@ def validate_source(data: SingleDataset) -> List[ValidationError]: return errors -def validate_generic_load_gen(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: +def validate_generic_load_gen(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_appliance(data, component) errors += all_valid_enum_values(data, component, "type", LoadGenType) return errors @@ -713,7 +713,7 @@ def validate_shunt(data: SingleDataset) -> List[ValidationError]: return errors -def validate_generic_voltage_sensor(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: +def validate_generic_voltage_sensor(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_base(data, component) errors += all_greater_than_zero(data, component, "u_sigma") errors += all_greater_than_zero(data, component, "u_measured") @@ -721,7 +721,7 @@ def validate_generic_voltage_sensor(data: SingleDataset, component: PowerGridCom return errors -def validate_generic_power_sensor(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: +def validate_generic_power_sensor(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_base(data, component) errors += all_greater_than_zero(data, component, "power_sigma") errors += all_valid_enum_values(data, component, "measured_terminal_type", MeasuredTerminalType) @@ -831,7 +831,7 @@ def 
validate_fault(data: SingleDataset) -> List[ValidationError]: return errors -def validate_regulator(data: SingleDataset, component: PowerGridComponent) -> List[ValidationError]: +def validate_regulator(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_base(data, component) errors += all_valid_ids( data, diff --git a/tests/unit/test_dataset_definitions.py b/tests/unit/test_dataset_definitions.py index 9e2a2207e..ff2536d22 100644 --- a/tests/unit/test_dataset_definitions.py +++ b/tests/unit/test_dataset_definitions.py @@ -5,12 +5,12 @@ import pytest from power_grid_model import power_grid_meta_data -from power_grid_model.dataset_definitions import PowerGridComponents, PowerGridDataTypes +from power_grid_model.dataset_definitions import ComponentTypes, DataTypes def test_power_grid_data_types(): power_grid_data_types = [data_type for data_type in power_grid_meta_data] - gen_power_grid_data_types = [member.value for member in PowerGridDataTypes] + gen_power_grid_data_types = [member.value for member in DataTypes] power_grid_data_types.sort() gen_power_grid_data_types.sort() assert power_grid_data_types == gen_power_grid_data_types @@ -18,7 +18,7 @@ def test_power_grid_data_types(): def test_power_grid_components(): power_grid_components = [component for component in power_grid_meta_data["input"]] - gen_power_grid_components = [member.value for member in PowerGridComponents] + gen_power_grid_components = [member.value for member in ComponentTypes] power_grid_components.sort() gen_power_grid_components.sort() assert power_grid_components == gen_power_grid_components diff --git a/tests/unit/test_power_grid_model.py b/tests/unit/test_power_grid_model.py index 3f0edc8be..17d63418c 100644 --- a/tests/unit/test_power_grid_model.py +++ b/tests/unit/test_power_grid_model.py @@ -9,7 +9,7 @@ import pytest from power_grid_model import PowerGridModel, initialize_array -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType from power_grid_model.errors import PowerGridBatchError, PowerGridError from power_grid_model.validation import assert_valid_input_data @@ -78,12 +78,12 @@ def test_simple_update(model: PowerGridModel, case_data): update_batch = case_data["update_batch"] source_indptr = update_batch["source"]["indptr"] source_update = update_batch["source"]["data"] - update_data: Dict[PowerGridComponent, np.ndarray] = { + update_data: Dict[ComponentType, np.ndarray] = { "source": source_update[source_indptr[0] : source_indptr[1]], "sym_load": update_batch["sym_load"][0, :], } model.update(update_data=update_data) - expected_result: Dict[PowerGridComponent, np.ndarray] = {"node": case_data["output_batch"]["node"][0, :]} + expected_result: Dict[ComponentType, np.ndarray] = {"node": case_data["output_batch"]["node"][0, :]} result = model.calculate_power_flow() compare_result(result, expected_result, rtol=0.0, atol=1e-8) @@ -91,7 +91,7 @@ def test_simple_update(model: PowerGridModel, case_data): def test_update_error(model: PowerGridModel): load_update = initialize_array("update", "sym_load", 1) load_update["id"] = 5 - update_data: Dict[PowerGridComponent, np.ndarray] = {"sym_load": load_update} + update_data: Dict[ComponentType, np.ndarray] = {"sym_load": load_update} with pytest.raises(PowerGridError, match="The id cannot be found:"): model.update(update_data=update_data) @@ -155,7 +155,7 @@ def test_batch_calculation_error_continue(model: PowerGridModel, case_data): assert "The id cannot 
be found:" in error.error_messages[0] # assert value result for scenario 0 result = {"node": result["node"][error.succeeded_scenarios, :]} - expected_result: Dict[PowerGridComponent, np.ndarray] = { + expected_result: Dict[ComponentType, np.ndarray] = { "node": case_data["output_batch"]["node"][error.succeeded_scenarios, :] } compare_result(result, expected_result, rtol=0.0, atol=1e-8) diff --git a/tests/unit/test_serialization.py b/tests/unit/test_serialization.py index 638d05927..1636a1383 100644 --- a/tests/unit/test_serialization.py +++ b/tests/unit/test_serialization.py @@ -10,7 +10,7 @@ import pytest from power_grid_model.core.power_grid_dataset import get_dataset_type -from power_grid_model.dataset_definitions import PowerGridComponent +from power_grid_model.dataset_definitions import ComponentType from power_grid_model.utils import json_deserialize, json_serialize, msgpack_deserialize, msgpack_serialize @@ -267,9 +267,9 @@ def assert_almost_equal(value: np.ndarray, reference: Any): def assert_scenario_correct( - deserialized_dataset: Mapping[PowerGridComponent, np.ndarray], + deserialized_dataset: Mapping[ComponentType, np.ndarray], serialized_dataset: Mapping[str, Any], - sparse_components: List[PowerGridComponent], + sparse_components: List[ComponentType], ): for key in serialized_dataset["data"]: if key not in deserialized_dataset: @@ -294,7 +294,7 @@ def assert_scenario_correct( def assert_serialization_correct( - deserialized_dataset: Mapping[PowerGridComponent, Union[np.ndarray, Mapping[str, np.ndarray]]], + deserialized_dataset: Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], serialized_dataset: Mapping[str, Any], ): """Assert the dataset correctly reprensents the input data.""" diff --git a/tests/unit/utils.py b/tests/unit/utils.py index b5e9e080a..9be1636e5 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -13,7 +13,7 @@ from power_grid_model.core.power_grid_model import PowerGridModel from power_grid_model.data_types import Dataset, PythonDataset, SingleDataset -from power_grid_model.dataset_definitions import PowerGridDataType +from power_grid_model.dataset_definitions import DataType from power_grid_model.errors import ( AutomaticTapCalculationError, ConflictID, @@ -285,7 +285,7 @@ def compare_result(actual: SingleDataset, expected: SingleDataset, rtol: float, ) -def convert_python_to_numpy(data: PythonDataset, data_type: PowerGridDataType) -> Dataset: +def convert_python_to_numpy(data: PythonDataset, data_type: DataType) -> Dataset: """ Convert native python data to internal numpy From e305dad786f3af3b8a64c3390a6a34c8f435e684 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Mon, 24 Jun 2024 13:06:11 +0200 Subject: [PATCH 24/47] Addresed review comments Signed-off-by: Santiago Figueroa --- code_generation/code_gen.py | 11 +++++----- .../power_grid_model/metadata_enums.py.jinja | 20 ++++++++++--------- docs/api_reference/python-api-reference.md | 2 +- src/power_grid_model/core/data_handling.py | 2 +- src/power_grid_model/dataset_definitions.py | 20 ------------------- 5 files changed, 18 insertions(+), 37 deletions(-) diff --git a/code_generation/code_gen.py b/code_generation/code_gen.py index 9234ccd84..7bf8fa33c 100644 --- a/code_generation/code_gen.py +++ b/code_generation/code_gen.py @@ -132,9 +132,9 @@ def render_metadata_types(self, template_path: Path, data_path: Path, output_pat for prefix in prefixes: dataset_types.append(f"{prefix}{dataset.name}") - if dataset.name == "input": - for component in dataset.components: - 
components.append(component.names) + if dataset.name == "input": + for component in dataset.components: + components.append(component.names) components = [name for sublist in components for name in sublist] @@ -154,9 +154,8 @@ def code_gen(self): for template_path in TEMPLATE_DIR.rglob(f"{template_name}.*.jinja"): output_suffix = template_path.with_suffix("").suffix output_dir = template_path.parent.relative_to(TEMPLATE_DIR) - if template_name == "metadata_enums": - template_name = "dataset_class_maps" # To use existing data. - for data_path in DATA_DIR.glob(f"{template_name}/*.json"): + data_name = template_name if template_name != "metadata_enums" else "dataset_class_maps" + for data_path in DATA_DIR.glob(f"{data_name}/*.json"): output_path = self.base_output_path / output_dir / data_path.with_suffix(output_suffix).name output_path.parent.mkdir(parents=True, exist_ok=True) print(f"Generating file: {output_path}") diff --git a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja index a3aace202..8c43e3f74 100644 --- a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja @@ -26,15 +26,16 @@ sys.path.append(sys_path) class DataTypes(Enum): """Dataset types.""" - {% for dataset_type in dataset_types %} + + {%- for dataset_type in dataset_types %} {{ dataset_type }} = "{{ dataset_type }}" - {% endfor %} + {%- endfor %} DataTypesLiteral = Literal[ - {% for dataset_type in dataset_types %} + {%- for dataset_type in dataset_types %} "{{ dataset_type }}", - {% endfor %} + {%- endfor %} ] @@ -52,15 +53,16 @@ A DataType is the type of a :class:`BatchDataset`. class ComponentTypes(Enum): """Grid component types.""" - {% for component in components %} + + {%- for component in components %} {{ component }} = "{{ component }}" - {% endfor %} + {%- endfor %} ComponentTypesLiteral = Literal[ - {% for component in components %} + {%- for component in components %} "{{ component }}", - {% endfor %} + {%- endfor %} ] @@ -74,4 +76,4 @@ A ComponentType is the type of a grid component. - ComponentType.line = "line" """ -# pylint: enable=invalid-name +# pylint: enable=invalid-name \ No newline at end of file diff --git a/docs/api_reference/python-api-reference.md b/docs/api_reference/python-api-reference.md index a9b1f5220..7eae96fef 100644 --- a/docs/api_reference/python-api-reference.md +++ b/docs/api_reference/python-api-reference.md @@ -33,7 +33,7 @@ SPDX-License-Identifier: MPL-2.0 .. autoclass:: power_grid_model.data_types.DenseBatchArray .. autoclass:: power_grid_model.data_types.SparseBatchArray .. autoclass:: power_grid_model.dataset_definitions.DataType -.. autoclass:: power_grid_model.dataset_definitions.DataType +.. 
autoclass:: power_grid_model.dataset_definitions.ComponentType ``` ## error types diff --git a/src/power_grid_model/core/data_handling.py b/src/power_grid_model/core/data_handling.py index b5c5522f4..cda7b67f2 100644 --- a/src/power_grid_model/core/data_handling.py +++ b/src/power_grid_model/core/data_handling.py @@ -150,6 +150,6 @@ def create_output_data( shape: Union[Tuple[int], Tuple[int, int]] = (batch_size, count) else: shape = (count,) - result_dict[name] = initialize_array(output_type.value, name, shape=shape, empty=True) # + result_dict[name] = initialize_array(output_type.value, name, shape=shape, empty=True) return result_dict diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py index 2c401ad83..b6acfc126 100644 --- a/src/power_grid_model/dataset_definitions.py +++ b/src/power_grid_model/dataset_definitions.py @@ -27,13 +27,9 @@ class DataTypes(Enum): """Dataset types.""" input = "input" - sym_output = "sym_output" - asym_output = "asym_output" - update = "update" - sc_output = "sc_output" @@ -61,37 +57,21 @@ class ComponentTypes(Enum): """Grid component types.""" node = "node" - line = "line" - link = "link" - transformer = "transformer" - transformer_tap_regulator = "transformer_tap_regulator" - three_winding_transformer = "three_winding_transformer" - sym_load = "sym_load" - sym_gen = "sym_gen" - asym_load = "asym_load" - asym_gen = "asym_gen" - shunt = "shunt" - source = "source" - sym_voltage_sensor = "sym_voltage_sensor" - asym_voltage_sensor = "asym_voltage_sensor" - sym_power_sensor = "sym_power_sensor" - asym_power_sensor = "asym_power_sensor" - fault = "fault" From b14ed3948d2b9d971b1c46120b402ee2dc4ebbd8 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 12:56:43 +0200 Subject: [PATCH 25/47] Quick examples run well Signed-off-by: Santiago Figueroa --- .../core/metadata_enums.py.jinja | 65 +++ .../power_grid_model/metadata_enums.py.jinja | 79 ---- scripts/quick_example.py | 5 +- scripts/quick_example_batch.py | 7 +- src/power_grid_model/__init__.py | 2 +- src/power_grid_model/_utils.py | 2 +- src/power_grid_model/core/buffer_handling.py | 2 +- src/power_grid_model/core/data_handling.py | 12 +- .../core/dataset_definitions.py | 85 ++++ .../core/power_grid_dataset.py | 19 +- src/power_grid_model/core/power_grid_meta.py | 13 +- src/power_grid_model/core/power_grid_model.py | 67 ++- src/power_grid_model/core/serialization.py | 2 +- src/power_grid_model/data_types.py | 2 +- src/power_grid_model/dataset_definitions.py | 109 ----- src/power_grid_model/utils.py | 6 +- src/power_grid_model/validation/errors.py | 2 +- src/power_grid_model/validation/rules.py | 2 +- src/power_grid_model/validation/utils.py | 7 +- src/power_grid_model/validation/validation.py | 446 +++++++++++------- tests/unit/test_dataset_definitions.py | 9 +- tests/unit/test_power_grid_model.py | 13 +- tests/unit/test_serialization.py | 2 +- tests/unit/utils.py | 2 +- .../validation/test_validation_functions.py | 3 +- 25 files changed, 537 insertions(+), 426 deletions(-) create mode 100644 code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja delete mode 100644 code_generation/templates/src/power_grid_model/metadata_enums.py.jinja create mode 100644 src/power_grid_model/core/dataset_definitions.py delete mode 100644 src/power_grid_model/dataset_definitions.py diff --git a/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja 
b/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja new file mode 100644 index 000000000..4eca15897 --- /dev/null +++ b/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja @@ -0,0 +1,65 @@ +# SPDX-FileCopyrightText: Contributors to the Power Grid Model project +# +# SPDX-License-Identifier: MPL-2.0 + +"""Data types for power grid model dataset and component types.""" + +# This file is automatically generated. DO NOT modify it manually! + +from enum import Enum +from typing import Any, Dict + +# pylint: disable=invalid-name + + +class DataType(Enum): + """ + A DataType is the type of a :class:`BatchDataset`. + + - Examples: + + - DataType.input = "input" + - DataType.update = "update" + """ + + {%- for dataset_type in dataset_types %} + {{ dataset_type }} = "{{ dataset_type }}" + {%- endfor %} + + +class ComponentType(Enum): + """ + A ComponentType is the type of a grid component. + + - Examples: + + - ComponentType.node = "node" + - ComponentType.line = "line" + """ + + {%- for component in components %} + {{ component }} = "{{ component }}" + {%- endfor %} + + +# pylint: enable=invalid-name + + +def _map_to_datatypes(data: Dict[Any, Any]) -> Dict[DataType, Any]: + """Helper function to map datatype str keys to DataType.""" + def map_keys(key: Any): + if isinstance(key, str): + return DataType[key] + return key + + return {map_keys(key): value for key, value in data.items()} + + +def _map_to_componenttypes(data: Dict[Any, Any]) -> Dict[ComponentType, Any]: + """Helper function to map componenttype str keys to ComponentType.""" + def map_keys(key: Any): + if isinstance(key, str): + return ComponentType[key] + return key + + return {map_keys(key): value for key, value in data.items()} diff --git a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja deleted file mode 100644 index 8c43e3f74..000000000 --- a/code_generation/templates/src/power_grid_model/metadata_enums.py.jinja +++ /dev/null @@ -1,79 +0,0 @@ -# SPDX-FileCopyrightText: Contributors to the Power Grid Model project -# -# SPDX-License-Identifier: MPL-2.0 - -"""Data types for power grid model dataset and component types.""" - -# This file is automatically generated. DO NOT modify it manually! - -import sys -from typing import Literal, Union - -# To avoid conflicts with src/power_grid_model/enum.py -# pylint: disable=wrong-import-position - -sys_path = sys.path.pop(0) -from enum import Enum - -# pylint: enable=wrong-import-position - -sys.path.append(sys_path) - -# Value names are defined in lower case instead of upper case -# pylint: disable=invalid-name - - -class DataTypes(Enum): - """Dataset types.""" - - - {%- for dataset_type in dataset_types %} - {{ dataset_type }} = "{{ dataset_type }}" - {%- endfor %} - - -DataTypesLiteral = Literal[ - {%- for dataset_type in dataset_types %} - "{{ dataset_type }}", - {%- endfor %} -] - - -DataType = Union[DataTypes, DataTypesLiteral] -""" -A DataType is the type of a :class:`BatchDataset`. 
- -- Examples: - - - DataType.input = "input" - - DataType.update = "update" -""" - - -class ComponentTypes(Enum): - """Grid component types.""" - - - {%- for component in components %} - {{ component }} = "{{ component }}" - {%- endfor %} - - -ComponentTypesLiteral = Literal[ - {%- for component in components %} - "{{ component }}", - {%- endfor %} -] - - -ComponentType = Union[ComponentTypes, ComponentTypesLiteral] -""" -A ComponentType is the type of a grid component. - -- Examples: - - - ComponentType.node = "node" - - ComponentType.line = "line" -""" - -# pylint: enable=invalid-name \ No newline at end of file diff --git a/scripts/quick_example.py b/scripts/quick_example.py index c2f9bbae1..d378f6ca8 100644 --- a/scripts/quick_example.py +++ b/scripts/quick_example.py @@ -7,8 +7,7 @@ import numpy as np import pandas as pd -from power_grid_model import LoadGenType, PowerGridModel, initialize_array -from power_grid_model.dataset_definitions import ComponentType +from power_grid_model import ComponentType, LoadGenType, PowerGridModel, initialize_array # node node = initialize_array("input", "node", 2) @@ -45,7 +44,7 @@ source["u_ref"] = [1.0] # input_data -input_data: Dict[ComponentType, np.ndarray] = { +input_data = { "node": node, "line": line, "sym_load": sym_load, diff --git a/scripts/quick_example_batch.py b/scripts/quick_example_batch.py index 0d520f2e5..47a1ff7cb 100644 --- a/scripts/quick_example_batch.py +++ b/scripts/quick_example_batch.py @@ -7,8 +7,7 @@ import numpy as np import pandas as pd -from power_grid_model import LoadGenType, PowerGridModel, initialize_array -from power_grid_model.dataset_definitions import ComponentType +from power_grid_model import ComponentType, LoadGenType, PowerGridModel, initialize_array """ node_1 ---line_3--- node_2 ---line_6--- node_7 @@ -57,7 +56,7 @@ source["u_ref"] = [1.0] # input_data -input_data: Dict[ComponentType, np.ndarray] = { +input_data = { "node": node, "line": line, "asym_load": asym_load, @@ -81,7 +80,7 @@ batch_load = initialize_array("update", "asym_load", (1000, 2)) batch_load["id"] = [[4, 8]] batch_load["p_specified"] = batch_p -batch_update: Dict[ComponentType, np.ndarray] = {"asym_load": batch_load} +batch_update = {"asym_load": batch_load} result = model.calculate_power_flow(symmetric=False, update_data=batch_update) print(result["node"]["u"].shape) # 1000 (scenarios) *3 (nodes) *3 (phases) diff --git a/src/power_grid_model/__init__.py b/src/power_grid_model/__init__.py index 5cb43f40a..54277c167 100644 --- a/src/power_grid_model/__init__.py +++ b/src/power_grid_model/__init__.py @@ -4,9 +4,9 @@ """Power Grid Model""" +from power_grid_model.core.dataset_definitions import ComponentType, DataType from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data from power_grid_model.core.power_grid_model import PowerGridModel -from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.enum import ( Branch3Side, BranchSide, diff --git a/src/power_grid_model/_utils.py b/src/power_grid_model/_utils.py index d4ef5f99c..4d808a7da 100644 --- a/src/power_grid_model/_utils.py +++ b/src/power_grid_model/_utils.py @@ -14,6 +14,7 @@ import numpy as np +from power_grid_model import ComponentType from power_grid_model.data_types import ( BatchArray, BatchDataset, @@ -24,7 +25,6 @@ SinglePythonDataset, SparseBatchArray, ) -from power_grid_model.dataset_definitions import ComponentType def is_nan(data) -> bool: diff --git a/src/power_grid_model/core/buffer_handling.py 
b/src/power_grid_model/core/buffer_handling.py index fc7b92029..66f7564ad 100644 --- a/src/power_grid_model/core/buffer_handling.py +++ b/src/power_grid_model/core/buffer_handling.py @@ -12,11 +12,11 @@ import numpy as np +from power_grid_model import ComponentType from power_grid_model.core.error_handling import VALIDATOR_MSG from power_grid_model.core.index_integer import IdxC, IdxNp from power_grid_model.core.power_grid_core import IdxPtr, VoidPtr from power_grid_model.core.power_grid_meta import ComponentMetaData -from power_grid_model.dataset_definitions import ComponentType @dataclass diff --git a/src/power_grid_model/core/data_handling.py b/src/power_grid_model/core/data_handling.py index cda7b67f2..2a60a5314 100644 --- a/src/power_grid_model/core/data_handling.py +++ b/src/power_grid_model/core/data_handling.py @@ -12,9 +12,9 @@ import numpy as np +from power_grid_model import ComponentType, DataType from power_grid_model.core.power_grid_dataset import CConstDataset, CMutableDataset from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data -from power_grid_model.dataset_definitions import ComponentType from power_grid_model.enum import CalculationType @@ -25,9 +25,9 @@ class OutputType(Enum): - asym_output """ - SYM_OUTPUT = "sym_output" - ASYM_OUTPUT = "asym_output" - SC_OUTPUT = "sc_output" + SYM_OUTPUT = DataType.sym_output + ASYM_OUTPUT = DataType.asym_output + SC_OUTPUT = DataType.sc_output def get_output_type(*, calculation_type: CalculationType, symmetric: bool) -> OutputType: @@ -64,7 +64,7 @@ def prepare_input_view(input_data: Mapping[ComponentType, np.ndarray]) -> CConst Returns: instance of CConstDataset ready to be fed into C API """ - return CConstDataset(input_data, dataset_type="input") + return CConstDataset(input_data, dataset_type=DataType.input) def prepare_update_view( @@ -80,7 +80,7 @@ def prepare_update_view( Returns: instance of CConstDataset ready to be fed into C API """ - return CConstDataset(update_data, dataset_type="update") + return CConstDataset(update_data, dataset_type=DataType.update) def prepare_output_view(output_data: Mapping[ComponentType, np.ndarray], output_type: OutputType) -> CMutableDataset: diff --git a/src/power_grid_model/core/dataset_definitions.py b/src/power_grid_model/core/dataset_definitions.py new file mode 100644 index 000000000..6e3570a38 --- /dev/null +++ b/src/power_grid_model/core/dataset_definitions.py @@ -0,0 +1,85 @@ +# SPDX-FileCopyrightText: Contributors to the Power Grid Model project +# +# SPDX-License-Identifier: MPL-2.0 + +"""Data types for power grid model dataset and component types.""" + +# This file is automatically generated. DO NOT modify it manually! + +from enum import Enum +from typing import Any, Dict + +# pylint: disable=invalid-name + + +class DataType(str, Enum): + """ + A DataType is the type of a :class:`BatchDataset`. + + - Examples: + + - DataType.input = "input" + - DataType.update = "update" + """ + + input = "input" + sym_output = "sym_output" + asym_output = "asym_output" + update = "update" + sc_output = "sc_output" + + +class ComponentType(str, Enum): + """ + A ComponentType is the type of a grid component. 
+ + - Examples: + + - ComponentType.node = "node" + - ComponentType.line = "line" + """ + + node = "node" + line = "line" + link = "link" + transformer = "transformer" + transformer_tap_regulator = "transformer_tap_regulator" + three_winding_transformer = "three_winding_transformer" + sym_load = "sym_load" + sym_gen = "sym_gen" + asym_load = "asym_load" + asym_gen = "asym_gen" + shunt = "shunt" + source = "source" + sym_voltage_sensor = "sym_voltage_sensor" + asym_voltage_sensor = "asym_voltage_sensor" + sym_power_sensor = "sym_power_sensor" + asym_power_sensor = "asym_power_sensor" + fault = "fault" + + +# pylint: enable=invalid-name + + +def _str_to_datatype(data_type: Any) -> DataType: + """Helper function to transform data_type str to DataType.""" + if isinstance(data_type, str): + return DataType[data_type] + return data_type + + +def _map_to_datatypes(data: Dict[Any, Any]) -> Dict[DataType, Any]: + """Helper function to map datatype str keys to DataType.""" + return {_str_to_datatype(key): value for key, value in data.items()} + + +def _str_to_componenttype(component: Any) -> ComponentType: + """Helper function to transform component str to ComponentType.""" + if isinstance(component, str): + return ComponentType[component] + return component + + +def _map_to_componenttypes(data: Dict[Any, Any]) -> Dict[ComponentType, Any]: + """Helper function to map componenttype str keys to ComponentType.""" + return {_str_to_componenttype(key): value for key, value in data.items()} diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py index 82b110658..724eb6c14 100644 --- a/src/power_grid_model/core/power_grid_dataset.py +++ b/src/power_grid_model/core/power_grid_dataset.py @@ -6,7 +6,7 @@ Power grid model raw dataset handler """ -from typing import Dict, List, Mapping, Optional, Union +from typing import Any, Dict, List, Mapping, Optional, Union import numpy as np @@ -17,6 +17,7 @@ get_buffer_properties, get_buffer_view, ) +from power_grid_model.core.dataset_definitions import ComponentType, DataType, _str_to_componenttype from power_grid_model.core.error_handling import VALIDATOR_MSG, assert_no_error from power_grid_model.core.power_grid_core import ( ConstDatasetPtr, @@ -26,7 +27,6 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_meta import DatasetMetaData, power_grid_meta_data -from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.errors import PowerGridError @@ -90,7 +90,10 @@ def components(self) -> List[ComponentType]: Returns: A list of the component names in the dataset """ - return [pgc.dataset_info_component_name(self._info, idx) for idx in range(self.n_components())] + return [ + _str_to_componenttype(pgc.dataset_info_component_name(self._info, idx)) + for idx in range(self.n_components()) + ] def elements_per_scenario(self) -> Mapping[ComponentType, int]: """ @@ -188,13 +191,13 @@ def __new__( Mapping[ComponentType, np.ndarray], Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[DataType] = None, + dataset_type: Any = None, ): instance = super().__new__(cls) instance._mutable_dataset = MutableDatasetPtr() instance._buffer_views = [] - instance._dataset_type = dataset_type if isinstance(dataset_type, str) else get_dataset_type(data) + instance._dataset_type = dataset_type if dataset_type in DataType else get_dataset_type(data) instance._schema = power_grid_meta_data[instance._dataset_type] if data: @@ -206,7 +209,7 @@ def 
__new__( instance._batch_size = 1 instance._mutable_dataset = pgc.create_dataset_mutable( - instance._dataset_type, instance._is_batch, instance._batch_size + instance._dataset_type.value, instance._is_batch, instance._batch_size ) assert_no_error() @@ -292,10 +295,10 @@ def _add_component_data( self._buffer_views.append(c_buffer) self._register_buffer(component, c_buffer) - def _register_buffer(self, component, buffer: CBuffer): + def _register_buffer(self, component: ComponentType, buffer: CBuffer): pgc.dataset_mutable_add_buffer( dataset=self._mutable_dataset, - component=component, + component=component.value, elements_per_scenario=buffer.n_elements_per_scenario, total_elements=buffer.total_elements, indptr=buffer.indptr, diff --git a/src/power_grid_model/core/power_grid_meta.py b/src/power_grid_model/core/power_grid_meta.py index c85477d3a..a6874c7b5 100644 --- a/src/power_grid_model/core/power_grid_meta.py +++ b/src/power_grid_model/core/power_grid_meta.py @@ -12,8 +12,8 @@ import numpy as np +from power_grid_model.core.dataset_definitions import ComponentType, DataType, _str_to_componenttype, _str_to_datatype from power_grid_model.core.power_grid_core import AttributePtr, ComponentPtr, DatasetPtr, power_grid_core as pgc -from power_grid_model.dataset_definitions import ComponentType, DataType # constant enum for ctype @@ -75,7 +75,7 @@ def _generate_meta_data() -> PowerGridMetaData: n_datasets = pgc.meta_n_datasets() for i in range(n_datasets): dataset = pgc.meta_get_dataset_by_idx(i) - py_meta_data[pgc.meta_dataset_name(dataset)] = _generate_meta_dataset(dataset) + py_meta_data[_str_to_datatype(pgc.meta_dataset_name(dataset))] = _generate_meta_dataset(dataset) return py_meta_data @@ -92,7 +92,7 @@ def _generate_meta_dataset(dataset: DatasetPtr) -> DatasetMetaData: n_components = pgc.meta_n_components(dataset) for i in range(n_components): component = pgc.meta_get_component_by_idx(dataset, i) - py_meta_dataset[pgc.meta_component_name(component)] = _generate_meta_component(component) + py_meta_dataset[_str_to_componenttype(pgc.meta_component_name(component))] = _generate_meta_component(component) return py_meta_dataset @@ -158,7 +158,10 @@ def _generate_meta_attributes(component: ComponentPtr) -> dict: def initialize_array( - data_type: DataType, component_type: ComponentType, shape: Union[tuple, int], empty: bool = False + data_type: Union[str, DataType], + component_type: Union[str, ComponentType], + shape: Union[tuple, int], + empty: bool = False, ) -> np.ndarray: """ Initializes an array for use in Power Grid Model calculations @@ -174,6 +177,8 @@ def initialize_array( Returns: np structured array with all entries as null value """ + data_type = _str_to_datatype(data_type) + component_type = _str_to_componenttype(component_type) if not isinstance(shape, tuple): shape = (shape,) if empty: diff --git a/src/power_grid_model/core/power_grid_model.py b/src/power_grid_model/core/power_grid_model.py index 3dada181e..ea526ae50 100644 --- a/src/power_grid_model/core/power_grid_model.py +++ b/src/power_grid_model/core/power_grid_model.py @@ -6,7 +6,7 @@ Main power grid model class """ from enum import IntEnum -from typing import Dict, List, Optional, Set, Type, Union +from typing import Any, Dict, List, Optional, Set, Type, Union import numpy as np @@ -17,11 +17,11 @@ prepare_output_view, prepare_update_view, ) +from power_grid_model.core.dataset_definitions import ComponentType, _map_to_componenttypes, _str_to_datatype from power_grid_model.core.error_handling import PowerGridBatchError, 
assert_no_error, handle_errors from power_grid_model.core.index_integer import IdNp, IdxNp from power_grid_model.core.options import Options from power_grid_model.core.power_grid_core import ConstDatasetPtr, IDPtr, IdxPtr, ModelPtr, power_grid_core as pgc -from power_grid_model.dataset_definitions import ComponentType from power_grid_model.enum import ( CalculationMethod, CalculationType, @@ -94,7 +94,9 @@ def __new__(cls, *_args, **_kwargs): instance._all_component_count = None return instance - def __init__(self, input_data: Dict[ComponentType, np.ndarray], system_frequency: float = 50.0): + def __init__( + self, input_data: Union[Dict[ComponentType, np.ndarray], Dict[str, np.ndarray]], system_frequency: float = 50.0 + ): """ Initialize the model from an input data set. @@ -110,12 +112,13 @@ def __init__(self, input_data: Dict[ComponentType, np.ndarray], system_frequency pgc.destroy_model(self._model_ptr) self._all_component_count = None # create new + input_data = _map_to_componenttypes(input_data) prepared_input = prepare_input_view(input_data) self._model_ptr = pgc.create_model(system_frequency, input_data=prepared_input.get_dataset_ptr()) assert_no_error() self._all_component_count = {k: v for k, v in prepared_input.get_info().total_elements().items() if v > 0} - def update(self, *, update_data: Dict[ComponentType, np.ndarray]): + def update(self, *, update_data: Union[Dict[ComponentType, np.ndarray], Dict[str, np.ndarray]]): """ Update the model with changes. @@ -128,11 +131,12 @@ def update(self, *, update_data: Dict[ComponentType, np.ndarray]): Returns: None """ + update_data = _map_to_componenttypes(update_data) prepared_update = prepare_update_view(update_data) pgc.update_model(self._model, prepared_update.get_dataset_ptr()) assert_no_error() - def get_indexer(self, component_type: ComponentType, ids: np.ndarray): + def get_indexer(self, component_type: Any, ids: np.ndarray): """ Get array of indexers given array of ids for component type @@ -143,6 +147,7 @@ def get_indexer(self, component_type: ComponentType, ids: np.ndarray): Returns: Array of indexers, same shape as input array ids """ + component_type = _str_to_datatype(component_type) ids_c = np.ascontiguousarray(ids, dtype=IdNp).ctypes.data_as(IDPtr) indexer = np.empty_like(ids, dtype=IdxNp, order="C") indexer_c = indexer.ctypes.data_as(IdxPtr) @@ -154,14 +159,25 @@ def get_indexer(self, component_type: ComponentType, ids: np.ndarray): def _get_output_component_count(self, calculation_type: CalculationType): exclude_types = { - CalculationType.power_flow: ["sensor", "fault"], - CalculationType.state_estimation: ["fault"], - CalculationType.short_circuit: ["sensor"], + CalculationType.power_flow: [ + ComponentType.sym_voltage_sensor, + ComponentType.asym_voltage_sensor, + ComponentType.sym_power_sensor, + ComponentType.asym_power_sensor, + ComponentType.fault, + ], + CalculationType.state_estimation: [ComponentType.fault], + CalculationType.short_circuit: [ + ComponentType.sym_voltage_sensor, + ComponentType.asym_voltage_sensor, + ComponentType.sym_power_sensor, + ComponentType.asym_power_sensor, + ], }.get(calculation_type, []) def include_type(component_type: ComponentType): for exclude_type in exclude_types: - if exclude_type == component_type: + if exclude_type.value in component_type.value: return False return True @@ -386,13 +402,18 @@ def calculate_power_flow( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.newton_raphson, - update_data: 
Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[ + Union[ + Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]], + Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]], + ] + ] = None, threading: int = -1, output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, tap_changing_strategy: Union[TapChangingStrategy, str] = TapChangingStrategy.disabled, - ) -> Dict[ComponentType, np.ndarray]: + ) -> Dict[Any, np.ndarray]: """ Calculate power flow once with the current model attributes. Or calculate in batch with the given update dataset in batch. @@ -460,7 +481,7 @@ def calculate_power_flow( error_tolerance=error_tolerance, max_iterations=max_iterations, calculation_method=calculation_method, - update_data=update_data, + update_data=(_map_to_componenttypes(update_data) if update_data is not None else None), threading=threading, output_component_types=output_component_types, continue_on_batch_error=continue_on_batch_error, @@ -475,12 +496,17 @@ def calculate_state_estimation( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iterative_linear, - update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[ + Union[ + Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]], + Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]], + ] + ] = None, threading: int = -1, output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, - ) -> Dict[ComponentType, np.ndarray]: + ) -> Dict[Any, np.ndarray]: """ Calculate state estimation once with the current model attributes. Or calculate in batch with the given update dataset in batch. @@ -545,7 +571,7 @@ def calculate_state_estimation( error_tolerance=error_tolerance, max_iterations=max_iterations, calculation_method=calculation_method, - update_data=update_data, + update_data=(_map_to_componenttypes(update_data) if update_data is not None else None), threading=threading, output_component_types=output_component_types, continue_on_batch_error=continue_on_batch_error, @@ -556,13 +582,18 @@ def calculate_short_circuit( self, *, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iec60909, - update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[ + Union[ + Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]], + Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]], + ] + ] = None, threading: int = -1, output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, short_circuit_voltage_scaling: Union[ShortCircuitVoltageScaling, str] = ShortCircuitVoltageScaling.maximum, - ) -> Dict[ComponentType, np.ndarray]: + ) -> Dict[Any, np.ndarray]: """ Calculate a short circuit once with the current model attributes. 
Or calculate in batch with the given update dataset in batch @@ -619,7 +650,7 @@ def calculate_short_circuit( """ return self._calculate_short_circuit( calculation_method=calculation_method, - update_data=update_data, + update_data=(_map_to_componenttypes(update_data) if update_data is not None else None), threading=threading, output_component_types=output_component_types, continue_on_batch_error=continue_on_batch_error, diff --git a/src/power_grid_model/core/serialization.py b/src/power_grid_model/core/serialization.py index f1d526042..939302180 100644 --- a/src/power_grid_model/core/serialization.py +++ b/src/power_grid_model/core/serialization.py @@ -13,6 +13,7 @@ import numpy as np +from power_grid_model import ComponentType, DataType from power_grid_model.core.error_handling import assert_no_error from power_grid_model.core.index_integer import IdxC from power_grid_model.core.power_grid_core import ( @@ -23,7 +24,6 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_dataset import CConstDataset, CWritableDataset -from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.errors import PowerGridSerializationError diff --git a/src/power_grid_model/data_types.py b/src/power_grid_model/data_types.py index 86e94de18..a98cc221d 100644 --- a/src/power_grid_model/data_types.py +++ b/src/power_grid_model/data_types.py @@ -11,7 +11,7 @@ import numpy as np -from power_grid_model.dataset_definitions import ComponentType +from power_grid_model import ComponentType # When we're dropping python 3.8, we should introduce proper NumPy type hinting diff --git a/src/power_grid_model/dataset_definitions.py b/src/power_grid_model/dataset_definitions.py deleted file mode 100644 index b6acfc126..000000000 --- a/src/power_grid_model/dataset_definitions.py +++ /dev/null @@ -1,109 +0,0 @@ -# SPDX-FileCopyrightText: Contributors to the Power Grid Model project -# -# SPDX-License-Identifier: MPL-2.0 - -"""Data types for power grid model dataset and component types.""" - -# This file is automatically generated. DO NOT modify it manually! - -import sys -from typing import Literal, Union - -# To avoid conflicts with src/power_grid_model/enum.py -# pylint: disable=wrong-import-position - -sys_path = sys.path.pop(0) -from enum import Enum - -# pylint: enable=wrong-import-position - -sys.path.append(sys_path) - -# Value names are defined in lower case instead of upper case -# pylint: disable=invalid-name - - -class DataTypes(Enum): - """Dataset types.""" - - input = "input" - sym_output = "sym_output" - asym_output = "asym_output" - update = "update" - sc_output = "sc_output" - - -DataTypesLiteral = Literal[ - "input", - "sym_output", - "asym_output", - "update", - "sc_output", -] - - -DataType = Union[DataTypes, DataTypesLiteral] -""" -A DataType is the type of a :class:`BatchDataset`. 
- -- Examples: - - - DataType.input = "input" - - DataType.update = "update" -""" - - -class ComponentTypes(Enum): - """Grid component types.""" - - node = "node" - line = "line" - link = "link" - transformer = "transformer" - transformer_tap_regulator = "transformer_tap_regulator" - three_winding_transformer = "three_winding_transformer" - sym_load = "sym_load" - sym_gen = "sym_gen" - asym_load = "asym_load" - asym_gen = "asym_gen" - shunt = "shunt" - source = "source" - sym_voltage_sensor = "sym_voltage_sensor" - asym_voltage_sensor = "asym_voltage_sensor" - sym_power_sensor = "sym_power_sensor" - asym_power_sensor = "asym_power_sensor" - fault = "fault" - - -ComponentTypesLiteral = Literal[ - "node", - "line", - "link", - "transformer", - "transformer_tap_regulator", - "three_winding_transformer", - "sym_load", - "sym_gen", - "asym_load", - "asym_gen", - "shunt", - "source", - "sym_voltage_sensor", - "asym_voltage_sensor", - "sym_power_sensor", - "asym_power_sensor", - "fault", -] - - -ComponentType = Union[ComponentTypes, ComponentTypesLiteral] -""" -A ComponentType is the type of a grid component. - -- Examples: - - - ComponentType.node = "node" - - ComponentType.line = "line" -""" - -# pylint: enable=invalid-name diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 846af6e1a..034ef8500 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -13,6 +13,7 @@ import numpy as np +from power_grid_model import DataType from power_grid_model._utils import ( get_and_verify_batch_sizes as _get_and_verify_batch_sizes, get_batch_size as _get_batch_size, @@ -25,7 +26,6 @@ msgpack_serialize, ) from power_grid_model.data_types import BatchArray, BatchDataset, Dataset, SingleDataset -from power_grid_model.dataset_definitions import DataType from power_grid_model.errors import PowerGridSerializationError _DEPRECATED_FUNCTION_MSG = "This function is deprecated." 
@@ -253,7 +253,7 @@ def import_input_data(json_file: Path) -> SingleDataset: """ warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning) - data = _compatibility_deprecated_import_json_data(json_file=json_file, data_type="input") + data = _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DataType.input) assert isinstance(data, dict) assert all(isinstance(component, np.ndarray) and component.ndim == 1 for component in data.values()) return cast_type(SingleDataset, data) @@ -277,7 +277,7 @@ def import_update_data(json_file: Path) -> BatchDataset: return cast_type( BatchDataset, - _compatibility_deprecated_import_json_data(json_file=json_file, data_type="update"), + _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DataType.update), ) diff --git a/src/power_grid_model/validation/errors.py b/src/power_grid_model/validation/errors.py index e46f3cb28..a7ff2dbe4 100644 --- a/src/power_grid_model/validation/errors.py +++ b/src/power_grid_model/validation/errors.py @@ -10,7 +10,7 @@ from enum import Enum from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Type, Union -from power_grid_model.dataset_definitions import ComponentType +from power_grid_model import ComponentType class ValidationError(ABC): diff --git a/src/power_grid_model/validation/rules.py b/src/power_grid_model/validation/rules.py index 2e1438376..721b43b8b 100644 --- a/src/power_grid_model/validation/rules.py +++ b/src/power_grid_model/validation/rules.py @@ -39,8 +39,8 @@ import numpy as np +from power_grid_model import ComponentType from power_grid_model.data_types import SingleDataset -from power_grid_model.dataset_definitions import ComponentType from power_grid_model.enum import FaultPhase, FaultType, WindingType from power_grid_model.validation.errors import ( ComparisonError, diff --git a/src/power_grid_model/validation/utils.py b/src/power_grid_model/validation/utils.py index 899ee5061..90a167dac 100644 --- a/src/power_grid_model/validation/utils.py +++ b/src/power_grid_model/validation/utils.py @@ -10,9 +10,8 @@ import numpy as np -from power_grid_model import power_grid_meta_data +from power_grid_model import ComponentType, DataType, power_grid_meta_data from power_grid_model.data_types import SingleDataset -from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.validation.errors import ValidationError @@ -105,7 +104,7 @@ def update_component_data(component: ComponentType, input_data: np.ndarray, upda for field in update_data.dtype.names: if field == "id": continue - nan = nan_type(component, field, "update") + nan = nan_type(component, field, DataType.input) if np.isnan(nan): mask = ~np.isnan(update_data[field]) else: @@ -162,7 +161,7 @@ def errors_to_string( return msg -def nan_type(component: ComponentType, field: str, data_type: DataType = "input"): +def nan_type(component: ComponentType, field: str, data_type: DataType = DataType.input): """ Helper function to retrieve the nan value for a certain field as defined in the power_grid_meta_data. 
""" diff --git a/src/power_grid_model/validation/validation.py b/src/power_grid_model/validation/validation.py index 30620fdc6..daeb99582 100644 --- a/src/power_grid_model/validation/validation.py +++ b/src/power_grid_model/validation/validation.py @@ -15,10 +15,9 @@ import numpy as np -from power_grid_model import power_grid_meta_data +from power_grid_model import ComponentType, DataType, power_grid_meta_data from power_grid_model._utils import convert_batch_dataset_to_batch_list from power_grid_model.data_types import BatchDataset, Dataset, SingleDataset -from power_grid_model.dataset_definitions import ComponentType, DataType from power_grid_model.enum import ( Branch3Side, BranchSide, @@ -86,7 +85,7 @@ def validate_input_data( """ # A deep copy is made of the input data, since default values will be added in the validation process input_data_copy = copy.deepcopy(input_data) - assert_valid_data_structure(input_data_copy, "input") + assert_valid_data_structure(input_data_copy, DataType.input) errors: List[ValidationError] = [] errors += validate_required_values(input_data_copy, calculation_type, symmetric) @@ -128,7 +127,7 @@ def validate_batch_data( Raises: Error: KeyError | TypeError | ValueError: if the data structure is invalid. """ - assert_valid_data_structure(input_data, "input") + assert_valid_data_structure(input_data, DataType.input) input_errors: List[ValidationError] = list(validate_unique_ids_across_components(input_data)) @@ -137,7 +136,7 @@ def validate_batch_data( errors = {} for batch, batch_update_data in enumerate(batch_data): - assert_valid_data_structure(batch_update_data, "update") + assert_valid_data_structure(batch_update_data, DataType.update) id_errors: List[ValidationError] = list(validate_ids_exist(batch_update_data, input_data)) batch_errors = input_errors + id_errors @@ -165,7 +164,7 @@ def assert_valid_data_structure(data: Dataset, data_type: DataType) -> None: Error: KeyError, TypeError """ - if data_type not in {"input", "update"}: + if data_type not in {DataType.input, DataType.update}: raise KeyError(f"Unexpected data type '{data_type}' (should be 'input' or 'update')") component_dtype = {component: meta.dtype for component, meta in power_grid_meta_data[data_type].items()} @@ -378,7 +377,7 @@ def validate_required_values( if calculation_type is None or calculation_type == CalculationType.short_circuit: required["fault"] += ["status", "fault_type"] if "fault" in data: - for elem in data["fault"]["fault_type"]: + for elem in data[ComponentType.fault]["fault_type"]: if elem not in (FaultType.three_phase, FaultType.nan): asym_sc = True break @@ -387,8 +386,8 @@ def validate_required_values( required["line"] += ["r0", "x0", "c0", "tan0"] required["shunt"] += ["g0", "b0"] - _process_power_sigma_and_p_q_sigma(data, "sym_power_sensor", required) - _process_power_sigma_and_p_q_sigma(data, "asym_power_sensor", required) + _process_power_sigma_and_p_q_sigma(data, ComponentType.sym_power_sensor, required) + _process_power_sigma_and_p_q_sigma(data, ComponentType.asym_power_sensor, required) return _validate_required_in_data(data, required) @@ -449,10 +448,10 @@ def validate_values(data: SingleDataset, calculation_type: Optional[CalculationT all_finite( data, { - "sym_power_sensor": ["power_sigma"], - "asym_power_sensor": ["power_sigma"], - "sym_voltage_sensor": ["u_sigma"], - "asym_voltage_sensor": ["u_sigma"], + ComponentType.sym_power_sensor: ["power_sigma"], + ComponentType.asym_power_sensor: ["power_sigma"], + ComponentType.sym_voltage_sensor: ["u_sigma"], + 
ComponentType.asym_voltage_sensor: ["u_sigma"], }, ) ) @@ -460,14 +459,14 @@ def validate_values(data: SingleDataset, calculation_type: Optional[CalculationT component_validators = { "node": validate_node, "line": validate_line, - "link": lambda d: validate_branch(d, "link"), + "link": lambda d: validate_branch(d, ComponentType.link), "transformer": validate_transformer, "three_winding_transformer": validate_three_winding_transformer, "source": validate_source, - "sym_load": lambda d: validate_generic_load_gen(d, "sym_load"), - "sym_gen": lambda d: validate_generic_load_gen(d, "sym_gen"), - "asym_load": lambda d: validate_generic_load_gen(d, "asym_load"), - "asym_gen": lambda d: validate_generic_load_gen(d, "asym_gen"), + "sym_load": lambda d: validate_generic_load_gen(d, ComponentType.sym_load), + "sym_gen": lambda d: validate_generic_load_gen(d, ComponentType.sym_gen), + "asym_load": lambda d: validate_generic_load_gen(d, ComponentType.asym_load), + "asym_gen": lambda d: validate_generic_load_gen(d, ComponentType.asym_gen), "shunt": validate_shunt, } @@ -477,13 +476,13 @@ def validate_values(data: SingleDataset, calculation_type: Optional[CalculationT if calculation_type in (None, CalculationType.state_estimation): if "sym_voltage_sensor" in data: - errors += validate_generic_voltage_sensor(data, "sym_voltage_sensor") + errors += validate_generic_voltage_sensor(data, ComponentType.sym_voltage_sensor) if "asym_voltage_sensor" in data: - errors += validate_generic_voltage_sensor(data, "asym_voltage_sensor") + errors += validate_generic_voltage_sensor(data, ComponentType.asym_voltage_sensor) if "sym_power_sensor" in data: - errors += validate_generic_power_sensor(data, "sym_power_sensor") + errors += validate_generic_power_sensor(data, ComponentType.sym_power_sensor) if "asym_power_sensor" in data: - errors += validate_generic_power_sensor(data, "asym_power_sensor") + errors += validate_generic_power_sensor(data, ComponentType.asym_power_sensor) if calculation_type in (None, CalculationType.short_circuit) and "fault" in data: errors += validate_fault(data) @@ -503,15 +502,15 @@ def validate_base(data: SingleDataset, component: ComponentType) -> List[Validat def validate_node(data: SingleDataset) -> List[ValidationError]: - errors = validate_base(data, "node") - errors += all_greater_than_zero(data, "node", "u_rated") + errors = validate_base(data, ComponentType.node) + errors += all_greater_than_zero(data, ComponentType.node, "u_rated") return errors def validate_branch(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_base(data, component) - errors += all_valid_ids(data, component, "from_node", "node") - errors += all_valid_ids(data, component, "to_node", "node") + errors += all_valid_ids(data, component, "from_node", ComponentType.node) + errors += all_valid_ids(data, component, "to_node", ComponentType.node) errors += all_not_two_values_equal(data, component, "to_node", "from_node") errors += all_boolean(data, component, "from_status") errors += all_boolean(data, component, "to_status") @@ -519,46 +518,56 @@ def validate_branch(data: SingleDataset, component: ComponentType) -> List[Valid def validate_line(data: SingleDataset) -> List[ValidationError]: - errors = validate_branch(data, "line") - errors += all_not_two_values_zero(data, "line", "r1", "x1") - errors += all_not_two_values_zero(data, "line", "r0", "x0") - errors += all_greater_than_zero(data, "line", "i_n") + errors = validate_branch(data, ComponentType.line) + errors += 
all_not_two_values_zero(data, ComponentType.line, "r1", "x1") + errors += all_not_two_values_zero(data, ComponentType.line, "r0", "x0") + errors += all_greater_than_zero(data, ComponentType.line, "i_n") return errors def validate_transformer(data: SingleDataset) -> List[ValidationError]: - errors = validate_branch(data, "transformer") - errors += all_greater_than_zero(data, "transformer", "u1") - errors += all_greater_than_zero(data, "transformer", "u2") - errors += all_greater_than_zero(data, "transformer", "sn") - errors += all_greater_or_equal(data, "transformer", "uk", "pk/sn") - errors += all_between(data, "transformer", "uk", 0, 1) - errors += all_greater_than_or_equal_to_zero(data, "transformer", "pk") - errors += all_greater_or_equal(data, "transformer", "i0", "p0/sn") - errors += all_less_than(data, "transformer", "i0", 1) - errors += all_greater_than_or_equal_to_zero(data, "transformer", "p0") - errors += all_valid_enum_values(data, "transformer", "winding_from", WindingType) - errors += all_valid_enum_values(data, "transformer", "winding_to", WindingType) - errors += all_between_or_at(data, "transformer", "clock", 0, 12) - errors += all_valid_clocks(data, "transformer", "clock", "winding_from", "winding_to") - errors += all_valid_enum_values(data, "transformer", "tap_side", BranchSide) - errors += all_between_or_at(data, "transformer", "tap_pos", "tap_min", "tap_max", data["transformer"]["tap_nom"], 0) - errors += all_between_or_at(data, "transformer", "tap_nom", "tap_min", "tap_max", 0) - errors += all_greater_than_or_equal_to_zero(data, "transformer", "tap_size") - errors += all_greater_or_equal(data, "transformer", "uk_min", "pk_min/sn", data["transformer"]["uk"]) - errors += all_between(data, "transformer", "uk_min", 0, 1, data["transformer"]["uk"]) - errors += all_greater_or_equal(data, "transformer", "uk_max", "pk_max/sn", data["transformer"]["uk"]) - errors += all_between(data, "transformer", "uk_max", 0, 1, data["transformer"]["uk"]) - errors += all_greater_than_or_equal_to_zero(data, "transformer", "pk_min", data["transformer"]["pk"]) - errors += all_greater_than_or_equal_to_zero(data, "transformer", "pk_max", data["transformer"]["pk"]) + errors = validate_branch(data, ComponentType.transformer) + errors += all_greater_than_zero(data, ComponentType.transformer, "u1") + errors += all_greater_than_zero(data, ComponentType.transformer, "u2") + errors += all_greater_than_zero(data, ComponentType.transformer, "sn") + errors += all_greater_or_equal(data, ComponentType.transformer, "uk", "pk/sn") + errors += all_between(data, ComponentType.transformer, "uk", 0, 1) + errors += all_greater_than_or_equal_to_zero(data, ComponentType.transformer, "pk") + errors += all_greater_or_equal(data, ComponentType.transformer, "i0", "p0/sn") + errors += all_less_than(data, ComponentType.transformer, "i0", 1) + errors += all_greater_than_or_equal_to_zero(data, ComponentType.transformer, "p0") + errors += all_valid_enum_values(data, ComponentType.transformer, "winding_from", WindingType) + errors += all_valid_enum_values(data, ComponentType.transformer, "winding_to", WindingType) + errors += all_between_or_at(data, ComponentType.transformer, "clock", 0, 12) + errors += all_valid_clocks(data, ComponentType.transformer, "clock", "winding_from", "winding_to") + errors += all_valid_enum_values(data, ComponentType.transformer, "tap_side", BranchSide) + errors += all_between_or_at( + data, ComponentType.transformer, "tap_pos", "tap_min", "tap_max", data[ComponentType.transformer]["tap_nom"], 0 + ) + 
errors += all_between_or_at(data, ComponentType.transformer, "tap_nom", "tap_min", "tap_max", 0) + errors += all_greater_than_or_equal_to_zero(data, ComponentType.transformer, "tap_size") + errors += all_greater_or_equal( + data, ComponentType.transformer, "uk_min", "pk_min/sn", data[ComponentType.transformer]["uk"] + ) + errors += all_between(data, ComponentType.transformer, "uk_min", 0, 1, data[ComponentType.transformer]["uk"]) + errors += all_greater_or_equal( + data, ComponentType.transformer, "uk_max", "pk_max/sn", data[ComponentType.transformer]["uk"] + ) + errors += all_between(data, ComponentType.transformer, "uk_max", 0, 1, data[ComponentType.transformer]["uk"]) + errors += all_greater_than_or_equal_to_zero( + data, ComponentType.transformer, "pk_min", data[ComponentType.transformer]["pk"] + ) + errors += all_greater_than_or_equal_to_zero( + data, ComponentType.transformer, "pk_max", data[ComponentType.transformer]["pk"] + ) return errors def validate_branch3(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_base(data, component) - errors += all_valid_ids(data, component, "node_1", "node") - errors += all_valid_ids(data, component, "node_2", "node") - errors += all_valid_ids(data, component, "node_3", "node") + errors += all_valid_ids(data, component, "node_1", ComponentType.node) + errors += all_valid_ids(data, component, "node_2", ComponentType.node) + errors += all_valid_ids(data, component, "node_3", ComponentType.node) errors += all_not_two_values_equal(data, component, "node_1", "node_2") errors += all_not_two_values_equal(data, component, "node_1", "node_3") errors += all_not_two_values_equal(data, component, "node_2", "node_3") @@ -570,118 +579,214 @@ def validate_branch3(data: SingleDataset, component: ComponentType) -> List[Vali # pylint: disable=R0915 def validate_three_winding_transformer(data: SingleDataset) -> List[ValidationError]: - errors = validate_branch3(data, "three_winding_transformer") - errors += all_greater_than_zero(data, "three_winding_transformer", "u1") - errors += all_greater_than_zero(data, "three_winding_transformer", "u2") - errors += all_greater_than_zero(data, "three_winding_transformer", "u3") - errors += all_greater_than_zero(data, "three_winding_transformer", "sn_1") - errors += all_greater_than_zero(data, "three_winding_transformer", "sn_2") - errors += all_greater_than_zero(data, "three_winding_transformer", "sn_3") - errors += all_greater_or_equal(data, "three_winding_transformer", "uk_12", "pk_12/sn_1") - errors += all_greater_or_equal(data, "three_winding_transformer", "uk_12", "pk_12/sn_2") - errors += all_greater_or_equal(data, "three_winding_transformer", "uk_13", "pk_13/sn_1") - errors += all_greater_or_equal(data, "three_winding_transformer", "uk_13", "pk_13/sn_3") - errors += all_greater_or_equal(data, "three_winding_transformer", "uk_23", "pk_23/sn_2") - errors += all_greater_or_equal(data, "three_winding_transformer", "uk_23", "pk_23/sn_3") - errors += all_between(data, "three_winding_transformer", "uk_12", 0, 1) - errors += all_between(data, "three_winding_transformer", "uk_13", 0, 1) - errors += all_between(data, "three_winding_transformer", "uk_23", 0, 1) - errors += all_greater_than_or_equal_to_zero(data, "three_winding_transformer", "pk_12") - errors += all_greater_than_or_equal_to_zero(data, "three_winding_transformer", "pk_13") - errors += all_greater_than_or_equal_to_zero(data, "three_winding_transformer", "pk_23") - errors += all_greater_or_equal(data, "three_winding_transformer", 
"i0", "p0/sn_1") - errors += all_less_than(data, "three_winding_transformer", "i0", 1) - errors += all_greater_than_or_equal_to_zero(data, "three_winding_transformer", "p0") - errors += all_valid_enum_values(data, "three_winding_transformer", "winding_1", WindingType) - errors += all_valid_enum_values(data, "three_winding_transformer", "winding_2", WindingType) - errors += all_valid_enum_values(data, "three_winding_transformer", "winding_3", WindingType) - errors += all_between_or_at(data, "three_winding_transformer", "clock_12", 0, 12) - errors += all_between_or_at(data, "three_winding_transformer", "clock_13", 0, 12) - errors += all_valid_clocks(data, "three_winding_transformer", "clock_12", "winding_1", "winding_2") - errors += all_valid_clocks(data, "three_winding_transformer", "clock_13", "winding_1", "winding_3") - errors += all_valid_enum_values(data, "three_winding_transformer", "tap_side", Branch3Side) + errors = validate_branch3(data, ComponentType.three_winding_transformer) + errors += all_greater_than_zero(data, ComponentType.three_winding_transformer, "u1") + errors += all_greater_than_zero(data, ComponentType.three_winding_transformer, "u2") + errors += all_greater_than_zero(data, ComponentType.three_winding_transformer, "u3") + errors += all_greater_than_zero(data, ComponentType.three_winding_transformer, "sn_1") + errors += all_greater_than_zero(data, ComponentType.three_winding_transformer, "sn_2") + errors += all_greater_than_zero(data, ComponentType.three_winding_transformer, "sn_3") + errors += all_greater_or_equal(data, ComponentType.three_winding_transformer, "uk_12", "pk_12/sn_1") + errors += all_greater_or_equal(data, ComponentType.three_winding_transformer, "uk_12", "pk_12/sn_2") + errors += all_greater_or_equal(data, ComponentType.three_winding_transformer, "uk_13", "pk_13/sn_1") + errors += all_greater_or_equal(data, ComponentType.three_winding_transformer, "uk_13", "pk_13/sn_3") + errors += all_greater_or_equal(data, ComponentType.three_winding_transformer, "uk_23", "pk_23/sn_2") + errors += all_greater_or_equal(data, ComponentType.three_winding_transformer, "uk_23", "pk_23/sn_3") + errors += all_between(data, ComponentType.three_winding_transformer, "uk_12", 0, 1) + errors += all_between(data, ComponentType.three_winding_transformer, "uk_13", 0, 1) + errors += all_between(data, ComponentType.three_winding_transformer, "uk_23", 0, 1) + errors += all_greater_than_or_equal_to_zero(data, ComponentType.three_winding_transformer, "pk_12") + errors += all_greater_than_or_equal_to_zero(data, ComponentType.three_winding_transformer, "pk_13") + errors += all_greater_than_or_equal_to_zero(data, ComponentType.three_winding_transformer, "pk_23") + errors += all_greater_or_equal(data, ComponentType.three_winding_transformer, "i0", "p0/sn_1") + errors += all_less_than(data, ComponentType.three_winding_transformer, "i0", 1) + errors += all_greater_than_or_equal_to_zero(data, ComponentType.three_winding_transformer, "p0") + errors += all_valid_enum_values(data, ComponentType.three_winding_transformer, "winding_1", WindingType) + errors += all_valid_enum_values(data, ComponentType.three_winding_transformer, "winding_2", WindingType) + errors += all_valid_enum_values(data, ComponentType.three_winding_transformer, "winding_3", WindingType) + errors += all_between_or_at(data, ComponentType.three_winding_transformer, "clock_12", 0, 12) + errors += all_between_or_at(data, ComponentType.three_winding_transformer, "clock_13", 0, 12) + errors += all_valid_clocks(data, 
ComponentType.three_winding_transformer, "clock_12", "winding_1", "winding_2") + errors += all_valid_clocks(data, ComponentType.three_winding_transformer, "clock_13", "winding_1", "winding_3") + errors += all_valid_enum_values(data, ComponentType.three_winding_transformer, "tap_side", Branch3Side) errors += all_between_or_at( data, - "three_winding_transformer", + ComponentType.three_winding_transformer, "tap_pos", "tap_min", "tap_max", - data["three_winding_transformer"]["tap_nom"], + data[ComponentType.three_winding_transformer]["tap_nom"], 0, ) - errors += all_between_or_at(data, "three_winding_transformer", "tap_nom", "tap_min", "tap_max", 0) - errors += all_greater_than_or_equal_to_zero(data, "three_winding_transformer", "tap_size") + errors += all_between_or_at(data, ComponentType.three_winding_transformer, "tap_nom", "tap_min", "tap_max", 0) + errors += all_greater_than_or_equal_to_zero(data, ComponentType.three_winding_transformer, "tap_size") errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_12_min", "pk_12_min/sn_1", data["three_winding_transformer"]["uk_12"] + data, + ComponentType.three_winding_transformer, + "uk_12_min", + "pk_12_min/sn_1", + data[ComponentType.three_winding_transformer]["uk_12"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_12_min", "pk_12_min/sn_2", data["three_winding_transformer"]["uk_12"] + data, + ComponentType.three_winding_transformer, + "uk_12_min", + "pk_12_min/sn_2", + data[ComponentType.three_winding_transformer]["uk_12"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_13_min", "pk_13_min/sn_1", data["three_winding_transformer"]["uk_13"] + data, + ComponentType.three_winding_transformer, + "uk_13_min", + "pk_13_min/sn_1", + data[ComponentType.three_winding_transformer]["uk_13"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_13_min", "pk_13_min/sn_3", data["three_winding_transformer"]["uk_13"] + data, + ComponentType.three_winding_transformer, + "uk_13_min", + "pk_13_min/sn_3", + data[ComponentType.three_winding_transformer]["uk_13"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_23_min", "pk_23_min/sn_2", data["three_winding_transformer"]["uk_23"] + data, + ComponentType.three_winding_transformer, + "uk_23_min", + "pk_23_min/sn_2", + data[ComponentType.three_winding_transformer]["uk_23"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_23_min", "pk_23_min/sn_3", data["three_winding_transformer"]["uk_23"] + data, + ComponentType.three_winding_transformer, + "uk_23_min", + "pk_23_min/sn_3", + data[ComponentType.three_winding_transformer]["uk_23"], ) errors += all_between( - data, "three_winding_transformer", "uk_12_min", 0, 1, data["three_winding_transformer"]["uk_12"] + data, + ComponentType.three_winding_transformer, + "uk_12_min", + 0, + 1, + data[ComponentType.three_winding_transformer]["uk_12"], ) errors += all_between( - data, "three_winding_transformer", "uk_13_min", 0, 1, data["three_winding_transformer"]["uk_13"] + data, + ComponentType.three_winding_transformer, + "uk_13_min", + 0, + 1, + data[ComponentType.three_winding_transformer]["uk_13"], ) errors += all_between( - data, "three_winding_transformer", "uk_23_min", 0, 1, data["three_winding_transformer"]["uk_23"] + data, + ComponentType.three_winding_transformer, + "uk_23_min", + 0, + 1, + data[ComponentType.three_winding_transformer]["uk_23"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", 
"uk_12_max", "pk_12_max/sn_1", data["three_winding_transformer"]["uk_12"] + data, + ComponentType.three_winding_transformer, + "uk_12_max", + "pk_12_max/sn_1", + data[ComponentType.three_winding_transformer]["uk_12"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_12_max", "pk_12_max/sn_2", data["three_winding_transformer"]["uk_12"] + data, + ComponentType.three_winding_transformer, + "uk_12_max", + "pk_12_max/sn_2", + data[ComponentType.three_winding_transformer]["uk_12"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_13_max", "pk_13_max/sn_1", data["three_winding_transformer"]["uk_13"] + data, + ComponentType.three_winding_transformer, + "uk_13_max", + "pk_13_max/sn_1", + data[ComponentType.three_winding_transformer]["uk_13"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_13_max", "pk_13_max/sn_3", data["three_winding_transformer"]["uk_13"] + data, + ComponentType.three_winding_transformer, + "uk_13_max", + "pk_13_max/sn_3", + data[ComponentType.three_winding_transformer]["uk_13"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_23_max", "pk_23_max/sn_2", data["three_winding_transformer"]["uk_23"] + data, + ComponentType.three_winding_transformer, + "uk_23_max", + "pk_23_max/sn_2", + data[ComponentType.three_winding_transformer]["uk_23"], ) errors += all_greater_or_equal( - data, "three_winding_transformer", "uk_23_max", "pk_23_max/sn_3", data["three_winding_transformer"]["uk_23"] + data, + ComponentType.three_winding_transformer, + "uk_23_max", + "pk_23_max/sn_3", + data[ComponentType.three_winding_transformer]["uk_23"], ) errors += all_between( - data, "three_winding_transformer", "uk_12_max", 0, 1, data["three_winding_transformer"]["uk_12"] + data, + ComponentType.three_winding_transformer, + "uk_12_max", + 0, + 1, + data[ComponentType.three_winding_transformer]["uk_12"], ) errors += all_between( - data, "three_winding_transformer", "uk_13_max", 0, 1, data["three_winding_transformer"]["uk_13"] + data, + ComponentType.three_winding_transformer, + "uk_13_max", + 0, + 1, + data[ComponentType.three_winding_transformer]["uk_13"], ) errors += all_between( - data, "three_winding_transformer", "uk_23_max", 0, 1, data["three_winding_transformer"]["uk_23"] + data, + ComponentType.three_winding_transformer, + "uk_23_max", + 0, + 1, + data[ComponentType.three_winding_transformer]["uk_23"], ) errors += all_greater_than_or_equal_to_zero( - data, "three_winding_transformer", "pk_12_min", data["three_winding_transformer"]["pk_12"] + data, + ComponentType.three_winding_transformer, + "pk_12_min", + data[ComponentType.three_winding_transformer]["pk_12"], ) errors += all_greater_than_or_equal_to_zero( - data, "three_winding_transformer", "pk_13_min", data["three_winding_transformer"]["pk_13"] + data, + ComponentType.three_winding_transformer, + "pk_13_min", + data[ComponentType.three_winding_transformer]["pk_13"], ) errors += all_greater_than_or_equal_to_zero( - data, "three_winding_transformer", "pk_23_min", data["three_winding_transformer"]["pk_23"] + data, + ComponentType.three_winding_transformer, + "pk_23_min", + data[ComponentType.three_winding_transformer]["pk_23"], ) errors += all_greater_than_or_equal_to_zero( - data, "three_winding_transformer", "pk_12_max", data["three_winding_transformer"]["pk_12"] + data, + ComponentType.three_winding_transformer, + "pk_12_max", + data[ComponentType.three_winding_transformer]["pk_12"], ) errors += all_greater_than_or_equal_to_zero( - data, 
"three_winding_transformer", "pk_13_max", data["three_winding_transformer"]["pk_13"] + data, + ComponentType.three_winding_transformer, + "pk_13_max", + data[ComponentType.three_winding_transformer]["pk_13"], ) errors += all_greater_than_or_equal_to_zero( - data, "three_winding_transformer", "pk_23_max", data["three_winding_transformer"]["pk_23"] + data, + ComponentType.three_winding_transformer, + "pk_23_max", + data[ComponentType.three_winding_transformer]["pk_23"], ) return errors @@ -689,16 +794,16 @@ def validate_three_winding_transformer(data: SingleDataset) -> List[ValidationEr def validate_appliance(data: SingleDataset, component: ComponentType) -> List[ValidationError]: errors = validate_base(data, component) errors += all_boolean(data, component, "status") - errors += all_valid_ids(data, component, "node", "node") + errors += all_valid_ids(data, component, "node", ComponentType.node) return errors def validate_source(data: SingleDataset) -> List[ValidationError]: - errors = validate_appliance(data, "source") - errors += all_greater_than_zero(data, "source", "u_ref") - errors += all_greater_than_zero(data, "source", "sk") - errors += all_greater_than_or_equal_to_zero(data, "source", "rx_ratio") - errors += all_greater_than_zero(data, "source", "z01_ratio") + errors = validate_appliance(data, ComponentType.source) + errors += all_greater_than_zero(data, ComponentType.source, "u_ref") + errors += all_greater_than_zero(data, ComponentType.source, "sk") + errors += all_greater_than_or_equal_to_zero(data, ComponentType.source, "rx_ratio") + errors += all_greater_than_zero(data, ComponentType.source, "z01_ratio") return errors @@ -709,7 +814,7 @@ def validate_generic_load_gen(data: SingleDataset, component: ComponentType) -> def validate_shunt(data: SingleDataset) -> List[ValidationError]: - errors = validate_appliance(data, "shunt") + errors = validate_appliance(data, ComponentType.shunt) return errors @@ -717,7 +822,7 @@ def validate_generic_voltage_sensor(data: SingleDataset, component: ComponentTyp errors = validate_base(data, component) errors += all_greater_than_zero(data, component, "u_sigma") errors += all_greater_than_zero(data, component, "u_measured") - errors += all_valid_ids(data, component, "measured_object", "node") + errors += all_valid_ids(data, component, "measured_object", ComponentType.node) return errors @@ -730,86 +835,86 @@ def validate_generic_power_sensor(data: SingleDataset, component: ComponentType) component, field="measured_object", ref_components=[ - "node", - "line", - "transformer", - "three_winding_transformer", - "source", - "shunt", - "sym_load", - "asym_load", - "sym_gen", - "asym_gen", + ComponentType.node, + ComponentType.line, + ComponentType.transformer, + ComponentType.three_winding_transformer, + ComponentType.source, + ComponentType.shunt, + ComponentType.sym_load, + ComponentType.asym_load, + ComponentType.sym_gen, + ComponentType.asym_gen, ], ) errors += all_valid_ids( data, component, field="measured_object", - ref_components=["line", "transformer"], + ref_components=[ComponentType.line, ComponentType.transformer], measured_terminal_type=MeasuredTerminalType.branch_from, ) errors += all_valid_ids( data, component, field="measured_object", - ref_components=["line", "transformer"], + ref_components=[ComponentType.line, ComponentType.transformer], measured_terminal_type=MeasuredTerminalType.branch_to, ) errors += all_valid_ids( data, component, field="measured_object", - ref_components="source", + ref_components=ComponentType.source, 
measured_terminal_type=MeasuredTerminalType.source, ) errors += all_valid_ids( data, component, field="measured_object", - ref_components="shunt", + ref_components=ComponentType.shunt, measured_terminal_type=MeasuredTerminalType.shunt, ) errors += all_valid_ids( data, component, field="measured_object", - ref_components=["sym_load", "asym_load"], + ref_components=[ComponentType.sym_load, ComponentType.asym_load], measured_terminal_type=MeasuredTerminalType.load, ) errors += all_valid_ids( data, component, field="measured_object", - ref_components=["sym_gen", "asym_gen"], + ref_components=[ComponentType.sym_gen, ComponentType.asym_gen], measured_terminal_type=MeasuredTerminalType.generator, ) errors += all_valid_ids( data, component, field="measured_object", - ref_components="three_winding_transformer", + ref_components=ComponentType.three_winding_transformer, measured_terminal_type=MeasuredTerminalType.branch3_1, ) errors += all_valid_ids( data, component, field="measured_object", - ref_components="three_winding_transformer", + ref_components=ComponentType.three_winding_transformer, measured_terminal_type=MeasuredTerminalType.branch3_2, ) errors += all_valid_ids( data, component, field="measured_object", - ref_components="three_winding_transformer", + ref_components=ComponentType.three_winding_transformer, measured_terminal_type=MeasuredTerminalType.branch3_3, ) errors += all_valid_ids( data, component, field="measured_object", - ref_components="node", + ref_components=ComponentType.node, measured_terminal_type=MeasuredTerminalType.node, ) if component in ("sym_power_sensor", "asym_power_sensor"): @@ -819,15 +924,15 @@ def validate_generic_power_sensor(data: SingleDataset, component: ComponentType) def validate_fault(data: SingleDataset) -> List[ValidationError]: - errors = validate_base(data, "fault") - errors += all_boolean(data, "fault", "status") - errors += all_valid_enum_values(data, "fault", "fault_type", FaultType) - errors += all_valid_enum_values(data, "fault", "fault_phase", FaultPhase) - errors += all_valid_fault_phases(data, "fault", "fault_type", "fault_phase") - errors += all_valid_ids(data, "fault", field="fault_object", ref_components="node") - errors += all_greater_than_or_equal_to_zero(data, "fault", "r_f") - errors += all_enabled_identical(data, "fault", "fault_type", "status") - errors += all_enabled_identical(data, "fault", "fault_phase", "status") + errors = validate_base(data, ComponentType.fault) + errors += all_boolean(data, ComponentType.fault, "status") + errors += all_valid_enum_values(data, ComponentType.fault, "fault_type", FaultType) + errors += all_valid_enum_values(data, ComponentType.fault, "fault_phase", FaultPhase) + errors += all_valid_fault_phases(data, ComponentType.fault, "fault_type", "fault_phase") + errors += all_valid_ids(data, ComponentType.fault, field="fault_object", ref_components=ComponentType.node) + errors += all_greater_than_or_equal_to_zero(data, ComponentType.fault, "r_f") + errors += all_enabled_identical(data, ComponentType.fault, "fault_type", "status") + errors += all_enabled_identical(data, ComponentType.fault, "fault_phase", "status") return errors @@ -837,35 +942,46 @@ def validate_regulator(data: SingleDataset, component: ComponentType) -> List[Va data, component, field="regulated_object", - ref_components=["transformer", "three_winding_transformer"], + ref_components=[ComponentType.transformer, ComponentType.three_winding_transformer], ) return errors def validate_transformer_tap_regulator(data: SingleDataset) -> 
List[ValidationError]: - errors = validate_regulator(data, "transformer_tap_regulator") - errors += all_boolean(data, "transformer_tap_regulator", "status") - errors += all_valid_enum_values(data, "transformer_tap_regulator", "control_side", [BranchSide, Branch3Side]) + errors = validate_regulator(data, ComponentType.transformer_tap_regulator) + errors += all_boolean(data, ComponentType.transformer_tap_regulator, "status") + errors += all_valid_enum_values( + data, ComponentType.transformer_tap_regulator, "control_side", [BranchSide, Branch3Side] + ) errors += all_valid_associated_enum_values( - data, "transformer_tap_regulator", "control_side", "regulated_object", ["transformer"], [BranchSide] + data, + ComponentType.transformer_tap_regulator, + "control_side", + "regulated_object", + [ComponentType.transformer], + [BranchSide], ) errors += all_valid_associated_enum_values( data, - "transformer_tap_regulator", + ComponentType.transformer_tap_regulator, "control_side", "regulated_object", - ["three_winding_transformer"], + [ComponentType.three_winding_transformer], [Branch3Side], ) - errors += all_greater_than_or_equal_to_zero(data, "transformer_tap_regulator", "u_set") - errors += all_greater_than_zero(data, "transformer_tap_regulator", "u_band") - errors += all_greater_than_or_equal_to_zero(data, "transformer_tap_regulator", "line_drop_compensation_r", 0.0) - errors += all_greater_than_or_equal_to_zero(data, "transformer_tap_regulator", "line_drop_compensation_x", 0.0) + errors += all_greater_than_or_equal_to_zero(data, ComponentType.transformer_tap_regulator, "u_set") + errors += all_greater_than_zero(data, ComponentType.transformer_tap_regulator, "u_band") + errors += all_greater_than_or_equal_to_zero( + data, ComponentType.transformer_tap_regulator, "line_drop_compensation_r", 0.0 + ) + errors += all_greater_than_or_equal_to_zero( + data, ComponentType.transformer_tap_regulator, "line_drop_compensation_x", 0.0 + ) errors += all_supported_tap_control_side( data, - "transformer_tap_regulator", + ComponentType.transformer_tap_regulator, "control_side", "regulated_object", - [("transformer", "tap_side"), ("three_winding_transformer", "tap_side")], + [(ComponentType.transformer, "tap_side"), (ComponentType.three_winding_transformer, "tap_side")], ) return errors diff --git a/tests/unit/test_dataset_definitions.py b/tests/unit/test_dataset_definitions.py index ff2536d22..6f6ec92e8 100644 --- a/tests/unit/test_dataset_definitions.py +++ b/tests/unit/test_dataset_definitions.py @@ -4,21 +4,20 @@ import pytest -from power_grid_model import power_grid_meta_data -from power_grid_model.dataset_definitions import ComponentTypes, DataTypes +from power_grid_model import ComponentType, DataType, power_grid_meta_data def test_power_grid_data_types(): power_grid_data_types = [data_type for data_type in power_grid_meta_data] - gen_power_grid_data_types = [member.value for member in DataTypes] + gen_power_grid_data_types = [member.value for member in DataType] power_grid_data_types.sort() gen_power_grid_data_types.sort() assert power_grid_data_types == gen_power_grid_data_types def test_power_grid_components(): - power_grid_components = [component for component in power_grid_meta_data["input"]] - gen_power_grid_components = [member.value for member in ComponentTypes] + power_grid_components = [component for component in power_grid_meta_data[DataType.input]] + gen_power_grid_components = [member.value for member in ComponentType] power_grid_components.sort() gen_power_grid_components.sort() assert 
power_grid_components == gen_power_grid_components diff --git a/tests/unit/test_power_grid_model.py b/tests/unit/test_power_grid_model.py index 17d63418c..e197c4bc2 100644 --- a/tests/unit/test_power_grid_model.py +++ b/tests/unit/test_power_grid_model.py @@ -8,8 +8,7 @@ import numpy as np import pytest -from power_grid_model import PowerGridModel, initialize_array -from power_grid_model.dataset_definitions import ComponentType +from power_grid_model import ComponentType, PowerGridModel, initialize_array from power_grid_model.errors import PowerGridBatchError, PowerGridError from power_grid_model.validation import assert_valid_input_data @@ -78,12 +77,12 @@ def test_simple_update(model: PowerGridModel, case_data): update_batch = case_data["update_batch"] source_indptr = update_batch["source"]["indptr"] source_update = update_batch["source"]["data"] - update_data: Dict[ComponentType, np.ndarray] = { + update_data = { "source": source_update[source_indptr[0] : source_indptr[1]], "sym_load": update_batch["sym_load"][0, :], } model.update(update_data=update_data) - expected_result: Dict[ComponentType, np.ndarray] = {"node": case_data["output_batch"]["node"][0, :]} + expected_result = {ComponentType.node: case_data["output_batch"]["node"][0, :]} result = model.calculate_power_flow() compare_result(result, expected_result, rtol=0.0, atol=1e-8) @@ -91,7 +90,7 @@ def test_simple_update(model: PowerGridModel, case_data): def test_update_error(model: PowerGridModel): load_update = initialize_array("update", "sym_load", 1) load_update["id"] = 5 - update_data: Dict[ComponentType, np.ndarray] = {"sym_load": load_update} + update_data = {"sym_load": load_update} with pytest.raises(PowerGridError, match="The id cannot be found:"): model.update(update_data=update_data) @@ -155,9 +154,7 @@ def test_batch_calculation_error_continue(model: PowerGridModel, case_data): assert "The id cannot be found:" in error.error_messages[0] # assert value result for scenario 0 result = {"node": result["node"][error.succeeded_scenarios, :]} - expected_result: Dict[ComponentType, np.ndarray] = { - "node": case_data["output_batch"]["node"][error.succeeded_scenarios, :] - } + expected_result = {ComponentType.node: case_data["output_batch"]["node"][error.succeeded_scenarios, :]} compare_result(result, expected_result, rtol=0.0, atol=1e-8) # general error before the batch with pytest.raises(PowerGridError, match="The calculation method is invalid for this calculation!"): diff --git a/tests/unit/test_serialization.py b/tests/unit/test_serialization.py index 1636a1383..05115bdf1 100644 --- a/tests/unit/test_serialization.py +++ b/tests/unit/test_serialization.py @@ -9,8 +9,8 @@ import numpy as np import pytest +from power_grid_model import ComponentType from power_grid_model.core.power_grid_dataset import get_dataset_type -from power_grid_model.dataset_definitions import ComponentType from power_grid_model.utils import json_deserialize, json_serialize, msgpack_deserialize, msgpack_serialize diff --git a/tests/unit/utils.py b/tests/unit/utils.py index 9be1636e5..ebc33be38 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -11,9 +11,9 @@ import numpy as np import pytest +from power_grid_model import DataType from power_grid_model.core.power_grid_model import PowerGridModel from power_grid_model.data_types import Dataset, PythonDataset, SingleDataset -from power_grid_model.dataset_definitions import DataType from power_grid_model.errors import ( AutomaticTapCalculationError, ConflictID, diff --git 
a/tests/unit/validation/test_validation_functions.py b/tests/unit/validation/test_validation_functions.py index e3d598066..ba41b66d8 100644 --- a/tests/unit/validation/test_validation_functions.py +++ b/tests/unit/validation/test_validation_functions.py @@ -10,6 +10,7 @@ import pytest from power_grid_model import CalculationType, LoadGenType, MeasuredTerminalType, initialize_array, power_grid_meta_data +from power_grid_model.core.dataset_definitions import ComponentType, DataType from power_grid_model.enum import Branch3Side, BranchSide, CalculationType, FaultType, TapChangingStrategy from power_grid_model.validation import assert_valid_input_data from power_grid_model.validation.errors import ( @@ -33,7 +34,7 @@ validate_values, ) -NaN = power_grid_meta_data["input"]["node"].nans["id"] +NaN = power_grid_meta_data[DataType.input][ComponentType.node].nans["id"] def test_assert_valid_data_structure(): From 8c8ef5cc14fe58ac67c6f07eb0c0c3a8f01ef719 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 13:25:33 +0200 Subject: [PATCH 26/47] Quick examples updated Signed-off-by: Santiago Figueroa --- scripts/quick_example.py | 22 +++++++++++----------- scripts/quick_example_batch.py | 30 +++++++++++++++--------------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/scripts/quick_example.py b/scripts/quick_example.py index d378f6ca8..acff9294b 100644 --- a/scripts/quick_example.py +++ b/scripts/quick_example.py @@ -7,15 +7,15 @@ import numpy as np import pandas as pd -from power_grid_model import ComponentType, LoadGenType, PowerGridModel, initialize_array +from power_grid_model import DataType, ComponentType, LoadGenType, PowerGridModel, initialize_array # node -node = initialize_array("input", "node", 2) +node = initialize_array(DataType.input, ComponentType.node, 2) node["id"] = [1, 2] node["u_rated"] = [10.5e3, 10.5e3] # line -line = initialize_array("input", "line", 1) +line = initialize_array(DataType.input, ComponentType.line, 1) line["id"] = [3] line["from_node"] = [1] line["to_node"] = [2] @@ -28,7 +28,7 @@ line["i_n"] = [1000] # load -sym_load = initialize_array("input", "sym_load", 1) +sym_load = initialize_array(DataType.input, ComponentType.sym_load, 1) sym_load["id"] = [4] sym_load["node"] = [2] sym_load["status"] = [1] @@ -37,7 +37,7 @@ sym_load["q_specified"] = [0.5e6] # source -source = initialize_array("input", "source", 1) +source = initialize_array(DataType.input, ComponentType.source, 1) source["id"] = [5] source["node"] = [1] source["status"] = [1] @@ -45,10 +45,10 @@ # input_data input_data = { - "node": node, - "line": line, - "sym_load": sym_load, - "source": source, + ComponentType.node: node, + ComponentType.line: line, + ComponentType.sym_load: sym_load, + ComponentType.source: source, } # call constructor @@ -57,6 +57,6 @@ result = model.calculate_power_flow() print("Node Input") -print(pd.DataFrame(input_data["node"])) +print(pd.DataFrame(input_data[ComponentType.node])) print("Node Result") -print(pd.DataFrame(result["node"])) +print(pd.DataFrame(result[ComponentType.node])) diff --git a/scripts/quick_example_batch.py b/scripts/quick_example_batch.py index 47a1ff7cb..df7692d42 100644 --- a/scripts/quick_example_batch.py +++ b/scripts/quick_example_batch.py @@ -7,7 +7,7 @@ import numpy as np import pandas as pd -from power_grid_model import ComponentType, LoadGenType, PowerGridModel, initialize_array +from power_grid_model import ComponentType, DataType, LoadGenType, PowerGridModel, initialize_array """ node_1 ---line_3--- node_2 
---line_6--- node_7 @@ -17,12 +17,12 @@ """ # node -node = initialize_array("input", "node", 3) +node = initialize_array(DataType.input, ComponentType.node, 3) node["id"] = [1, 2, 7] node["u_rated"] = [10.5e3, 10.5e3, 10.5e3] # line -line = initialize_array("input", "line", 2) +line = initialize_array(DataType.input, ComponentType.line, 2) line["id"] = [3, 6] line["from_node"] = [1, 2] line["to_node"] = [2, 7] @@ -38,7 +38,7 @@ line["tan0"] = [0.0, 0.0] # zero sequence parameters # load -asym_load = initialize_array("input", "asym_load", 2) +asym_load = initialize_array(DataType.input, ComponentType.asym_load, 2) asym_load["id"] = [4, 8] asym_load["node"] = [2, 7] asym_load["status"] = [1, 1] @@ -49,7 +49,7 @@ asym_load["q_specified"] = [[0.5e6, 0.0, 0.0], [0.0, 0.2e6, 0.0]] # input for three phase per entry # source -source = initialize_array("input", "source", 1) +source = initialize_array(DataType.input, ComponentType.source, 1) source["id"] = [5] source["node"] = [1] source["status"] = [1] @@ -57,10 +57,10 @@ # input_data input_data = { - "node": node, - "line": line, - "asym_load": asym_load, - "source": source, + ComponentType.node: node, + ComponentType.line: line, + ComponentType.asym_load: asym_load, + ComponentType.source: source, } # call constructor @@ -69,18 +69,18 @@ result = model.calculate_power_flow(symmetric=False) print("Node Input") -print(pd.DataFrame(input_data["node"])) +print(pd.DataFrame(input_data[ComponentType.node])) print("Node Result") -print(result["node"]["u"]) # N*3 array, in symmetric calculation is this N array -print(result["asym_load"]["p"]) # N*3 array, in symmetric calculation is this N array +print(result[ComponentType.node]["u"]) # N*3 array, in symmetric calculation is this N array +print(result[ComponentType.asym_load]["p"]) # N*3 array, in symmetric calculation is this N array # batch calculation scaler = np.linspace(0, 1, 1000) batch_p = asym_load["p_specified"].reshape(1, 2, 3) * scaler.reshape(-1, 1, 1) -batch_load = initialize_array("update", "asym_load", (1000, 2)) +batch_load = initialize_array(DataType.update, ComponentType.asym_load, (1000, 2)) batch_load["id"] = [[4, 8]] batch_load["p_specified"] = batch_p -batch_update = {"asym_load": batch_load} +batch_update = {ComponentType.asym_load: batch_load} result = model.calculate_power_flow(symmetric=False, update_data=batch_update) -print(result["node"]["u"].shape) # 1000 (scenarios) *3 (nodes) *3 (phases) +print(result[ComponentType.node]["u"].shape) # 1000 (scenarios) *3 (nodes) *3 (phases) From b85ac0a5bc0a26fcb572b9fd020573aa9c6d5229 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 15:01:40 +0200 Subject: [PATCH 27/47] Updated power flow example Signed-off-by: Santiago Figueroa --- docs/examples/Power Flow Example.ipynb | 82 +++++++++++++------------- 1 file changed, 41 insertions(+), 41 deletions(-) diff --git a/docs/examples/Power Flow Example.ipynb b/docs/examples/Power Flow Example.ipynb index ce94b6c85..bfa15aa74 100644 --- a/docs/examples/Power Flow Example.ipynb +++ b/docs/examples/Power Flow Example.ipynb @@ -51,7 +51,7 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType\n", + "from power_grid_model import LoadGenType, ComponentType, DataType\n", "from power_grid_model import PowerGridModel, CalculationMethod, CalculationType\n", "from power_grid_model import initialize_array" ] @@ -80,12 +80,12 @@ "outputs": [], "source": [ "# node\n", - "node = 
initialize_array(\"input\", \"node\", 3)\n", + "node = initialize_array(DataType.input, ComponentType.node, 3)\n", "node[\"id\"] = np.array([1, 2, 6])\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(\"input\", \"line\", 3)\n", + "line = initialize_array(DataType.input, ComponentType.line, 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -98,7 +98,7 @@ "line[\"i_n\"] = [1000, 1000, 1000]\n", "\n", "# load\n", - "sym_load = initialize_array(\"input\", \"sym_load\", 2)\n", + "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 2)\n", "sym_load[\"id\"] = [4, 7]\n", "sym_load[\"node\"] = [2, 6]\n", "sym_load[\"status\"] = [1, 1]\n", @@ -107,7 +107,7 @@ "sym_load[\"q_specified\"] = [5e6, 2e6]\n", "\n", "# source\n", - "source = initialize_array(\"input\", \"source\", 1)\n", + "source = initialize_array(DataType.input, ComponentType.source, 1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", @@ -115,10 +115,10 @@ "\n", "# all\n", "input_data = {\n", - " \"node\": node,\n", - " \"line\": line,\n", - " \"sym_load\": sym_load,\n", - " \"source\": source\n", + " ComponentType.node: node,\n", + " ComponentType.line: line,\n", + " ComponentType.sym_load: sym_load,\n", + " ComponentType.source: source\n", "}" ] }, @@ -148,7 +148,7 @@ } ], "source": [ - "print(pd.DataFrame(input_data[\"sym_load\"]))" + "print(pd.DataFrame(input_data[ComponentType.sym_load]))" ] }, { @@ -296,9 +296,9 @@ ], "source": [ "print(\"------node result------\")\n", - "print(pd.DataFrame(output_data[\"node\"]))\n", + "print(pd.DataFrame(output_data[ComponentType.node]))\n", "print(\"------line result------\")\n", - "print(pd.DataFrame(output_data[\"line\"]))" + "print(pd.DataFrame(output_data[ComponentType.line]))" ] }, { @@ -340,12 +340,12 @@ " error_tolerance=1e-8,\n", " max_iterations=20,\n", " calculation_method=CalculationMethod.newton_raphson,\n", - " output_component_types=[\"node\"])\n", + " output_component_types=[ComponentType.node])\n", "\n", "print(\"List of component types in result dataset\")\n", "print(list(output_data.keys()))\n", "print(\"------node result------\")\n", - "print(pd.DataFrame(output_data[\"node\"]))" + "print(pd.DataFrame(output_data[ComponentType.node]))" ] }, { @@ -395,7 +395,7 @@ "source": [ "output_data = model.calculate_power_flow(symmetric=True, calculation_method=CalculationMethod.linear)\n", "print(\"------node result------\")\n", - "print(pd.DataFrame(output_data[\"node\"]))" + "print(pd.DataFrame(output_data[ComponentType.node]))" ] }, { @@ -440,19 +440,19 @@ "metadata": {}, "outputs": [], "source": [ - "update_sym_load = initialize_array(\"update\", \"sym_load\", 2)\n", + "update_sym_load = initialize_array(DataType.update, ComponentType.sym_load, 2)\n", "update_sym_load[\"id\"] = [4, 7] # same ID\n", "update_sym_load[\"p_specified\"] = [30e6, 15e6] # change active power\n", "# leave reactive power the same, no need to specify\n", "\n", - "update_line = initialize_array(\"update\", \"line\", 1)\n", + "update_line = initialize_array(DataType.update, ComponentType.line, 1)\n", "update_line[\"id\"] = [3] # change line ID 3\n", "update_line[\"from_status\"] = [0] # switch off at from side\n", "# leave to-side swiching status the same, no need to specify\n", "\n", "update_data = {\n", - " \"sym_load\": update_sym_load,\n", - " \"line\": update_line\n", + " ComponentType.sym_load: update_sym_load,\n", + " ComponentType.line: 
update_line\n", "}" ] }, @@ -537,7 +537,7 @@ "source": [ "output_data = model_2.calculate_power_flow()\n", "print(\"------line result------\")\n", - "print(pd.DataFrame(output_data[\"line\"]))" + "print(pd.DataFrame(output_data[ComponentType.line]))" ] }, { @@ -608,7 +608,7 @@ "outputs": [], "source": [ "load_profile = initialize_array(\n", - " \"update\", \"sym_load\", (10, 2)\n", + " DataType.update, ComponentType.sym_load, (10, 2)\n", ") # note the shape of the array, 10 scenarios, 2 objects (loads)\n", "# below is an assignment of shape (1, 2) array to shape (10, 2) array\n", "# the numpy broadcasting rule ensures that the same object ids are repeated 10 times\n", @@ -618,7 +618,7 @@ "# the array is an (10, 2) shape, each row is a scenario, each column is an object\n", "load_profile[\"p_specified\"] = [[30e6, 15e6]] * np.linspace(0, 1, 10).reshape(-1, 1)\n", "\n", - "time_series_mutation = {\"sym_load\": load_profile}\n", + "time_series_mutation = {ComponentType.sym_load: load_profile}\n", "\n", "assert_valid_batch_data(input_data=input_data, update_data=time_series_mutation, calculation_type=CalculationType.power_flow)" ] @@ -659,7 +659,7 @@ ], "source": [ "output_data = model.calculate_power_flow(update_data=time_series_mutation)\n", - "print(output_data[\"line\"][\"i_from\"])" + "print(output_data[ComponentType.line][\"i_from\"])" ] }, { @@ -700,7 +700,7 @@ } ], "source": [ - "print(pd.DataFrame(output_data[\"line\"][0]))" + "print(pd.DataFrame(output_data[ComponentType.line][0]))" ] }, { @@ -730,7 +730,7 @@ } ], "source": [ - "print(output_data[\"line\"][\"i_from\"][:,0])" + "print(output_data[ComponentType.line][\"i_from\"][:,0])" ] }, { @@ -762,7 +762,7 @@ "metadata": {}, "outputs": [], "source": [ - "line_profile = initialize_array(\"update\", \"line\", (3, 3)) # 3 scenarios, 3 objects (lines)\n", + "line_profile = initialize_array(DataType.update, ComponentType.line, (3, 3)) # 3 scenarios, 3 objects (lines)\n", "# below the same broadcasting trick\n", "line_profile[\"id\"] = [[3, 5, 8]]\n", "# fully specify the status of all lines, even it is the same as the base scenario\n", @@ -777,7 +777,7 @@ " [1, 1, 0]\n", "]\n", "\n", - "n_min_1_mutation_update_all = {\"line\": line_profile}\n", + "n_min_1_mutation_update_all = {ComponentType.line: line_profile}\n", "\n", "assert_valid_batch_data(input_data=input_data, update_data=n_min_1_mutation_update_all, calculation_type=CalculationType.power_flow)" ] @@ -809,7 +809,7 @@ ], "source": [ "output_data = model.calculate_power_flow(update_data=n_min_1_mutation_update_all)\n", - "print(output_data[\"line\"][\"i_from\"])" + "print(output_data[ComponentType.line][\"i_from\"])" ] }, { @@ -830,14 +830,14 @@ "metadata": {}, "outputs": [], "source": [ - "line_profile = initialize_array(\"update\", \"line\", (3, 1)) # 3 scenarios, 1 object mutation per scenario\n", + "line_profile = initialize_array(DataType.update, ComponentType.line, (3, 1)) # 3 scenarios, 1 object mutation per scenario\n", "# for each mutation, only one object is specified\n", "line_profile[\"id\"] = [[3], [5], [8]]\n", "# specify only the changed status (switch off) of one line\n", "line_profile[\"from_status\"] = [[0], [0], [0]]\n", "line_profile[\"to_status\"] = [[0], [0], [0]]\n", "\n", - "n_min_1_mutation_update_specific = {\"line\": line_profile}\n", + "n_min_1_mutation_update_specific = {ComponentType.line: line_profile}\n", "\n", "assert_valid_batch_data(input_data=input_data, update_data=n_min_1_mutation_update_specific, calculation_type=CalculationType.power_flow)" ] 
@@ -869,7 +869,7 @@ ], "source": [ "output_data = model.calculate_power_flow(update_data=n_min_1_mutation_update_specific)\n", - "print(output_data[\"line\"][\"i_from\"])" + "print(output_data[ComponentType.line][\"i_from\"])" ] }, { @@ -957,11 +957,11 @@ "from power_grid_model.errors import PowerGridError, ConflictVoltage\n", "\n", "# node\n", - "node_error = initialize_array(\"input\", \"node\", 2)\n", + "node_error = initialize_array(DataType.input, ComponentType.node, 2)\n", "node_error[\"id\"] = [1, 2]\n", "node_error[\"u_rated\"] = [10.5e3, 150.0e3] # different rated voltages\n", "# line\n", - "line_error = initialize_array(\"input\", \"line\", 1)\n", + "line_error = initialize_array(DataType.input, ComponentType.line, 1)\n", "line_error[\"id\"] = [3]\n", "line_error[\"from_node\"] = [1]\n", "line_error[\"to_node\"] = [2]\n", @@ -974,8 +974,8 @@ "line_error[\"i_n\"] = [1000]\n", "\n", "error_data = {\n", - " \"node\": node_error,\n", - " \"line\": line_error\n", + " ComponentType.node: node_error,\n", + " ComponentType.line: line_error\n", "}\n", "\n", "try:\n", @@ -1017,13 +1017,13 @@ "source": [ "from power_grid_model.errors import IDNotFound\n", "\n", - "line_update_error = initialize_array(\"update\", \"line\", 1)\n", + "line_update_error = initialize_array(DataType.update, ComponentType.line, 1)\n", "line_update_error[\"id\"] = [12345] # non-existing\n", "line_update_error[\"from_status\"] = [1]\n", "\n", "\n", "try:\n", - " model.update(update_data={\"line\": line_update_error})\n", + " model.update(update_data={ComponentType.line: line_update_error})\n", "except IDNotFound as e:\n", " print(e)" ] @@ -1096,8 +1096,8 @@ "metadata": {}, "outputs": [], "source": [ - "time_series_mutation[\"sym_load\"][\"id\"][3] = 1000 # unknown id\n", - "time_series_mutation[\"sym_load\"][\"p_specified\"][7] = 1e100 # large power\n" + "time_series_mutation[ComponentType.sym_load][\"id\"][3] = 1000 # unknown id\n", + "time_series_mutation[ComponentType.sym_load][\"p_specified\"][7] = 1e100 # large power\n" ] }, { @@ -1180,12 +1180,12 @@ "\n", "# print node data for u_pu, note that the data is rubbish for scenario 3 and 7\n", "print(\"Node data with invalid results\")\n", - "print(output_data[\"node\"][\"u_pu\"])\n", + "print(output_data[ComponentType.node][\"u_pu\"])\n", "\n", "# we can print the data with only succeeded scenarios\n", "e = model.batch_error\n", "print(\"Node data with only valid results\")\n", - "print(output_data[\"node\"][\"u_pu\"][e.succeeded_scenarios])" + "print(output_data[ComponentType.node][\"u_pu\"][e.succeeded_scenarios])" ] }, { From dd81626f42ef945a43ff76d5699cc77e2d15c51f Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 15:29:38 +0200 Subject: [PATCH 28/47] Updated state estimation example Signed-off-by: Santiago Figueroa --- docs/examples/State Estimation Example.ipynb | 72 ++++++++++---------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/docs/examples/State Estimation Example.ipynb b/docs/examples/State Estimation Example.ipynb index acc891d31..dc8f9b9e5 100644 --- a/docs/examples/State Estimation Example.ipynb +++ b/docs/examples/State Estimation Example.ipynb @@ -49,7 +49,7 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType\n", + "from power_grid_model import LoadGenType, DataType, ComponentType\n", "from power_grid_model import PowerGridModel, CalculationMethod, CalculationType, MeasuredTerminalType\n", "from power_grid_model 
import initialize_array" ] @@ -77,12 +77,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(\"input\", \"node\", 3)\n", + "node = initialize_array(DataType.input, ComponentType.node, 3)\n", "node[\"id\"] = [1, 2, 6]\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(\"input\", \"line\", 3)\n", + "line = initialize_array(DataType.input, ComponentType.line, 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -95,7 +95,7 @@ "line[\"i_n\"] = [1000, 1000, 1000]\n", "\n", "# load\n", - "sym_load = initialize_array(\"input\", \"sym_load\", 2)\n", + "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 2)\n", "sym_load[\"id\"] = [4, 7]\n", "sym_load[\"node\"] = [2, 6]\n", "sym_load[\"status\"] = [1, 1]\n", @@ -104,21 +104,21 @@ "sym_load[\"q_specified\"] = [5e6, 2e6]\n", "\n", "# source\n", - "source = initialize_array(\"input\", \"source\", 1)\n", + "source = initialize_array(DataType.input, ComponentType.source, 1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# voltage sensor\n", - "sym_voltage_sensor = initialize_array(\"input\", \"sym_voltage_sensor\", 3)\n", + "sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 3)\n", "sym_voltage_sensor[\"id\"] = [11, 12, 13]\n", "sym_voltage_sensor[\"measured_object\"] = [1, 2, 6]\n", "sym_voltage_sensor[\"u_sigma\"] = [1.0, 1.0, 1.0]\n", "sym_voltage_sensor[\"u_measured\"] = [10489.37, 9997.32, 10102.01]\n", "\n", "# power sensor\n", - "sym_power_sensor = initialize_array(\"input\", \"sym_power_sensor\", 8)\n", + "sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 8)\n", "sym_power_sensor[\"id\"] = [14, 15, 16, 17, 18, 19, 20, 21]\n", "sym_power_sensor[\"measured_object\"] = [3, 3, 5, 5, 8, 8, 4, 6]\n", "sym_power_sensor[\"measured_terminal_type\"] = [\n", @@ -137,12 +137,12 @@ "\n", "# all\n", "input_data = {\n", - " \"node\": node,\n", - " \"line\": line,\n", - " \"sym_load\": sym_load,\n", - " \"source\": source,\n", - " \"sym_voltage_sensor\": sym_voltage_sensor,\n", - " \"sym_power_sensor\": sym_power_sensor,\n", + " ComponentType.node: node,\n", + " ComponentType.line: line,\n", + " ComponentType.sym_load: sym_load,\n", + " ComponentType.source: source,\n", + " ComponentType.sym_voltage_sensor: sym_voltage_sensor,\n", + " ComponentType.sym_power_sensor: sym_power_sensor,\n", "}" ] }, @@ -502,13 +502,13 @@ ], "source": [ "print(\"Node result\")\n", - "display(pd.DataFrame(output_data[\"node\"]))\n", + "display(pd.DataFrame(output_data[ComponentType.node]))\n", "\n", "print(\"Line result\")\n", - "display(pd.DataFrame(output_data[\"line\"]))\n", + "display(pd.DataFrame(output_data[ComponentType.line]))\n", "\n", "print(\"Sym_load result\")\n", - "display(pd.DataFrame(output_data[\"sym_load\"]))" + "display(pd.DataFrame(output_data[ComponentType.sym_load]))" ] }, { @@ -626,7 +626,7 @@ "output_data_NR = model.calculate_state_estimation(symmetric=True, calculation_method=CalculationMethod.newton_raphson)\n", "\n", "print(\"Node result\")\n", - "display(pd.DataFrame(output_data_NR[\"node\"]))" + "display(pd.DataFrame(output_data_NR[ComponentType.node]))" ] }, { @@ -728,7 +728,7 @@ ], "source": [ "# power sensor\n", - "sym_power_sensor2 = initialize_array(\"input\", \"sym_power_sensor\", 8)\n", + "sym_power_sensor2 = initialize_array(DataType.input, 
ComponentType.sym_power_sensor, 8)\n",
 "sym_power_sensor2[\"id\"] = [14, 15, 16, 17, 18, 19, 20, 21]\n",
 "sym_power_sensor2[\"measured_object\"] = [3, 3, 5, 5, 8, 8, 4, 6]\n",
 "sym_power_sensor2[\"measured_terminal_type\"] = [\n",
@@ -748,18 +748,18 @@
 "\n",
 "# all\n",
 "input_data2 = {\n",
-    "    \"node\": node,\n",
-    "    \"line\": line,\n",
-    "    \"sym_load\": sym_load,\n",
-    "    \"source\": source,\n",
-    "    \"sym_voltage_sensor\": sym_voltage_sensor,\n",
-    "    \"sym_power_sensor\": sym_power_sensor2,\n",
+    "    ComponentType.node: node,\n",
+    "    ComponentType.line: line,\n",
+    "    ComponentType.sym_load: sym_load,\n",
+    "    ComponentType.source: source,\n",
+    "    ComponentType.sym_voltage_sensor: sym_voltage_sensor,\n",
+    "    ComponentType.sym_power_sensor: sym_power_sensor2,\n",
 "}\n",
 "\n",
 "model2 = PowerGridModel(input_data2)\n",
 "output_data_NR2 = model2.calculate_state_estimation(symmetric=True, calculation_method=CalculationMethod.newton_raphson)\n",
 "print(\"Node result\")\n",
-    "display(pd.DataFrame(output_data_NR2[\"node\"]))"
+    "display(pd.DataFrame(output_data_NR2[ComponentType.node]))"
 ]
},
{
@@ -883,21 +883,21 @@
 }
],
"source": [
-    "sym_voltage_sensor_update = initialize_array(\"update\", \"sym_voltage_sensor\", 1)\n",
+    "sym_voltage_sensor_update = initialize_array(DataType.update, ComponentType.sym_voltage_sensor, 1)\n",
 "# for each mutation, only one object is specified\n",
 "sym_voltage_sensor_update[\"id\"] = 13\n",
 "sym_voltage_sensor_update[\"u_sigma\"] = np.inf # disable this sensor\n",
 "\n",
-    "sym_power_sensor_update = initialize_array(\"update\", \"sym_power_sensor\", 1)\n",
+    "sym_power_sensor_update = initialize_array(DataType.update, ComponentType.sym_power_sensor, 1)\n",
 "sym_power_sensor_update[\"id\"] = 21\n",
 "sym_power_sensor_update[\"power_sigma\"] = np.inf # disable this sensor\n",
 "\n",
-    "update_data = {\"sym_voltage_sensor\": sym_voltage_sensor_update, \"sym_power_sensor\": sym_power_sensor_update}\n",
+    "update_data = {ComponentType.sym_voltage_sensor: sym_voltage_sensor_update, ComponentType.sym_power_sensor: sym_power_sensor_update}\n",
 "\n",
 "# Permanent update\n",
 "model.update(update_data=update_data)\n",
 "output_data = model.calculate_state_estimation()\n",
-    "display(pd.DataFrame(output_data[\"node\"]))\n",
+    "display(pd.DataFrame(output_data[ComponentType.node]))\n",
 "\n",
 "# fully re-instantiate the original model to prevent this cell from influencing re-runs. 
A model update would be sufficient as well.\n", "model = PowerGridModel(input_data)" @@ -933,15 +933,15 @@ "source": [ "from power_grid_model.errors import PowerGridError\n", "\n", - "sym_voltage_sensor_update = initialize_array(\"update\", \"sym_voltage_sensor\", 3)\n", + "sym_voltage_sensor_update = initialize_array(DataType.update, ComponentType.sym_voltage_sensor, 3)\n", "sym_voltage_sensor_update[\"id\"] = sym_voltage_sensor[\"id\"]\n", "sym_voltage_sensor_update[\"u_sigma\"] = np.inf # disable all sensors\n", "\n", - "sym_power_sensor_update = initialize_array(\"update\", \"sym_power_sensor\", 8)\n", + "sym_power_sensor_update = initialize_array(DataType.update, ComponentType.sym_power_sensor, 8)\n", "sym_power_sensor_update[\"id\"] = sym_power_sensor[\"id\"]\n", "sym_power_sensor_update[\"power_sigma\"] = np.inf # disable all sensors\n", "\n", - "update_data = {\"sym_voltage_sensor\": sym_voltage_sensor_update, \"sym_power_sensor\": sym_power_sensor_update}\n", + "update_data = {ComponentType.sym_voltage_sensor: sym_voltage_sensor_update, ComponentType.sym_power_sensor: sym_power_sensor_update}\n", "\n", "try:\n", " # Permanent update\n", @@ -984,7 +984,7 @@ "outputs": [], "source": [ "sym_voltage_sensor_update = initialize_array(\n", - " \"update\", \"sym_voltage_sensor\", (4, 3)\n", + " DataType.update, ComponentType.sym_voltage_sensor, (4, 3)\n", ") # 4 scenarios, 3 objects per scenario\n", "# for each mutation, only one object is specified\n", "sym_voltage_sensor_update[\"id\"] = [[11, 12, 13]] * 4\n", @@ -996,7 +996,7 @@ "]\n", "sym_voltage_sensor_update[\"u_sigma\"][2, 2] = np.inf # disable the third sensor of the third scenario\n", "\n", - "sym_power_sensor_update = initialize_array(\"update\", \"sym_power_sensor\", (4, 1))\n", + "sym_power_sensor_update = initialize_array(DataType.update, ComponentType.sym_power_sensor, (4, 1))\n", "sym_power_sensor_update[\"id\"] = [21]\n", "sym_power_sensor_update[\"power_sigma\"] = [\n", " [1.0e3],\n", @@ -1005,7 +1005,7 @@ " [np.inf], # disables this sensor on the last scenario\n", "]\n", "\n", - "update_data = {\"sym_voltage_sensor\": sym_voltage_sensor_update, \"sym_power_sensor\": sym_power_sensor_update}" + "update_data = {ComponentType.sym_voltage_sensor: sym_voltage_sensor_update, ComponentType.sym_power_sensor: sym_power_sensor_update}" ] }, { @@ -1065,7 +1065,7 @@ " max_iterations=20,\n", " calculation_method=CalculationMethod.iterative_linear,\n", ")\n", - "print(batch_output_data[\"node\"][\"u\"])" + "print(batch_output_data[ComponentType.node][\"u\"])" ] } ], From ac6c8915a43ec120f65d0eedb72f1882a7839fd3 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 15:41:59 +0200 Subject: [PATCH 29/47] Updated short circuit example Signed-off-by: Santiago Figueroa --- docs/examples/Short Circuit Example.ipynb | 34 +++++++++++------------ 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/docs/examples/Short Circuit Example.ipynb b/docs/examples/Short Circuit Example.ipynb index 64fb7219d..05376916b 100644 --- a/docs/examples/Short Circuit Example.ipynb +++ b/docs/examples/Short Circuit Example.ipynb @@ -54,7 +54,7 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType\n", + "from power_grid_model import LoadGenType, ComponentType, DataType\n", "from power_grid_model import PowerGridModel, CalculationMethod, CalculationType, FaultType, FaultPhase, ShortCircuitVoltageScaling\n", "from power_grid_model import 
initialize_array" ] @@ -82,12 +82,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(\"input\", \"node\", 3)\n", + "node = initialize_array(DataType.input, ComponentType.node, 3)\n", "node[\"id\"] = np.array([1, 2, 6])\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(\"input\", \"line\", 3)\n", + "line = initialize_array(DataType.input, ComponentType.line, 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -100,7 +100,7 @@ "line[\"i_n\"] = [1000, 1000, 1000]\n", "\n", "# load\n", - "sym_load = initialize_array(\"input\", \"sym_load\", 2)\n", + "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 2)\n", "sym_load[\"id\"] = [4, 7]\n", "sym_load[\"node\"] = [2, 6]\n", "sym_load[\"status\"] = [1, 1]\n", @@ -109,14 +109,14 @@ "sym_load[\"q_specified\"] = [5e6, 2e6]\n", "\n", "# source\n", - "source = initialize_array(\"input\", \"source\", 1)\n", + "source = initialize_array(DataType.input, ComponentType.source, 1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# fault\n", - "fault = initialize_array(\"input\", \"fault\", 1)\n", + "fault = initialize_array(DataType.input, ComponentType.fault, 1)\n", "fault[\"id\"] = [11]\n", "fault[\"status\"] = [1]\n", "fault[\"fault_object\"] = [6]\n", @@ -127,11 +127,11 @@ "\n", "# all\n", "input_data = {\n", - " \"node\": node,\n", - " \"line\": line,\n", - " \"sym_load\": sym_load,\n", - " \"source\": source,\n", - " \"fault\": fault\n", + " ComponentType.node: node,\n", + " ComponentType.line: line,\n", + " ComponentType.sym_load: sym_load,\n", + " ComponentType.source: source,\n", + " ComponentType.fault: fault\n", "}" ] }, @@ -284,19 +284,19 @@ ], "source": [ "print(\"\\n------fault result: id------\")\n", - "print(output_data[\"fault\"][\"id\"])\n", + "print(output_data[ComponentType.fault][\"id\"])\n", "print(\"------fault result: i_f------\")\n", - "print(output_data[\"fault\"][\"i_f\"])\n", + "print(output_data[ComponentType.fault][\"i_f\"])\n", "\n", "print(\"\\n------node result: id------\")\n", - "print(output_data[\"node\"][\"id\"])\n", + "print(output_data[ComponentType.node][\"id\"])\n", "print(\"------node result: u_pu------\")\n", - "print(output_data[\"node\"][\"u_pu\"])\n", + "print(output_data[ComponentType.node][\"u_pu\"])\n", "\n", "print(\"\\n------line result: id------\")\n", - "print(output_data[\"line\"][\"id\"])\n", + "print(output_data[ComponentType.line][\"id\"])\n", "print(\"------line result: u_pu------\")\n", - "print(output_data[\"line\"][\"i_from\"])\n" + "print(output_data[ComponentType.line][\"i_from\"])\n" ] }, { From e1b32a6261f6f649f6081390f72b49fa8fde0427 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 15:59:03 +0200 Subject: [PATCH 30/47] Updated asymmetric calculation example Signed-off-by: Santiago Figueroa --- .../Asymmetric Calculation Example.ipynb | 64 +++++++++---------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/docs/examples/Asymmetric Calculation Example.ipynb b/docs/examples/Asymmetric Calculation Example.ipynb index 032451a72..26183217f 100644 --- a/docs/examples/Asymmetric Calculation Example.ipynb +++ b/docs/examples/Asymmetric Calculation Example.ipynb @@ -52,7 +52,7 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType\n", + "from 
power_grid_model import LoadGenType, ComponentType, DataType\n", "from power_grid_model import PowerGridModel, CalculationMethod, CalculationType, MeasuredTerminalType\n", "from power_grid_model import initialize_array" ] @@ -83,12 +83,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(\"input\", \"node\", 3)\n", + "node = initialize_array(DataType.input, ComponentType.node, 3)\n", "node[\"id\"] = np.array([1, 2, 6])\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(\"input\", \"line\", 3)\n", + "line = initialize_array(DataType.input, ComponentType.line, 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -105,7 +105,7 @@ "line[\"tan0\"] = [0, 0, 0]\n", "\n", "# sym load\n", - "sym_load = initialize_array(\"input\", \"sym_load\", 1)\n", + "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 1)\n", "sym_load[\"id\"] = [4]\n", "sym_load[\"node\"] = [2]\n", "sym_load[\"status\"] = [1]\n", @@ -114,7 +114,7 @@ "sym_load[\"q_specified\"] = [5e6]\n", "\n", "# asym load\n", - "asym_load = initialize_array(\"input\", \"asym_load\", 1)\n", + "asym_load = initialize_array(DataType.input, ComponentType.asym_load, 1)\n", "asym_load[\"id\"] = [7]\n", "asym_load[\"node\"] = [6]\n", "asym_load[\"status\"] = [1]\n", @@ -123,7 +123,7 @@ "asym_load[\"q_specified\"] = [[0, 8e6, 2e6]] # the 3 phases may have different loads\n", "\n", "# source\n", - "source = initialize_array(\"input\", \"source\", 1)\n", + "source = initialize_array(DataType.input, ComponentType.source, 1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", @@ -131,11 +131,11 @@ "\n", "# all\n", "asym_input_data = {\n", - " \"node\": node,\n", - " \"line\": line,\n", - " \"sym_load\": sym_load,\n", - " \"asym_load\": asym_load,\n", - " \"source\": source\n", + " ComponentType.node: node,\n", + " ComponentType.line: line,\n", + " ComponentType.sym_load: sym_load,\n", + " ComponentType.asym_load: asym_load,\n", + " ComponentType.source: source\n", "}" ] }, @@ -209,9 +209,9 @@ ], "source": [ "print(\"------node voltage result------\")\n", - "print(pd.DataFrame(asym_result[\"node\"][\"u\"]))\n", + "print(pd.DataFrame(asym_result[ComponentType.node][\"u\"]))\n", "print(\"------node angle result------\")\n", - "print(pd.DataFrame(asym_result[\"node\"][\"u_angle\"]))" + "print(pd.DataFrame(asym_result[ComponentType.node][\"u_angle\"]))" ] }, { @@ -269,7 +269,7 @@ "outputs": [], "source": [ "# note the shape of the array, 10 scenarios, 1 objects (asymmetric load_7)\n", - "load_profile = initialize_array(\"update\", \"asym_load\", (10, 1)) \n", + "load_profile = initialize_array(DataType.update, ComponentType.asym_load, (10, 1)) \n", "\n", "# this is a scale of asym_load from 0% to 100%------------------\n", "# the array is an (10, 1, 3) shape, which shows (scenario, object, phase).\n", @@ -277,7 +277,7 @@ "load_profile[\"id\"] = [7]\n", "load_profile[\"p_specified\"] = [10e6, 20e6 , 0] * np.linspace(0, 1, 10).reshape(-1, 1, 1)\n", "\n", - "time_series_mutation = {\"asym_load\": load_profile}" + "time_series_mutation = {ComponentType.asym_load: load_profile}" ] }, { @@ -389,7 +389,7 @@ } ], "source": [ - "display(pd.DataFrame(output_data[\"line\"][\"p_from\"][0]))" + "display(pd.DataFrame(output_data[ComponentType.line][\"p_from\"][0]))" ] }, { @@ -519,7 +519,7 @@ } ], "source": [ - "display(pd.DataFrame(output_data[\"line\"][\"i_from\"][:,0]))" + 
"display(pd.DataFrame(output_data[ComponentType.line][\"i_from\"][:,0]))" ] }, { @@ -541,14 +541,14 @@ "outputs": [], "source": [ "# sym voltage sensor\n", - "sym_voltage_sensor = initialize_array(\"input\", \"sym_voltage_sensor\", 2)\n", + "sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 2)\n", "sym_voltage_sensor[\"id\"] = [11, 12]\n", "sym_voltage_sensor[\"measured_object\"] = [1, 2]\n", "sym_voltage_sensor[\"u_sigma\"] = [100, 10]\n", "sym_voltage_sensor[\"u_measured\"] = [6000, 5500]\n", "\n", "# asym voltage sensor\n", - "asym_voltage_sensor = initialize_array(\"input\", \"asym_voltage_sensor\", 1)\n", + "asym_voltage_sensor = initialize_array(DataType.input, ComponentType.asym_voltage_sensor, 1)\n", "asym_voltage_sensor[\"id\"] = [13]\n", "asym_voltage_sensor[\"measured_object\"] = [6]\n", "asym_voltage_sensor[\"u_sigma\"] = [100]\n", @@ -556,7 +556,7 @@ "\n", "\n", "# sym power sensor\n", - "sym_power_sensor = initialize_array(\"input\", \"sym_power_sensor\", 7)\n", + "sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 7)\n", "sym_power_sensor[\"id\"] = [14, 15, 16, 17, 18, 19, 20]\n", "sym_power_sensor[\"measured_object\"] = [3, 3, 5, 5, 8, 8, 4]\n", "sym_power_sensor[\"measured_terminal_type\"] = [\n", @@ -570,7 +570,7 @@ "sym_power_sensor[\"q_measured\"] = [5e6, -7e6, 2e6, -2e6, 5e6, -5e6, 5e6]\n", "\n", "# asym power sensor\n", - "asym_power_sensor = initialize_array(\"input\", \"asym_power_sensor\", 1)\n", + "asym_power_sensor = initialize_array(DataType.input, ComponentType.asym_power_sensor, 1)\n", "asym_power_sensor[\"id\"] = [21]\n", "asym_power_sensor[\"measured_object\"] = [6]\n", "asym_power_sensor[\"measured_terminal_type\"] = [MeasuredTerminalType.node]\n", @@ -580,15 +580,15 @@ "\n", "# all \n", "asym_input_data = {\n", - " \"node\": node,\n", - " \"line\": line,\n", - " \"sym_load\": sym_load,\n", - " \"asym_load\": asym_load,\n", - " \"source\": source,\n", - " \"sym_voltage_sensor\": sym_voltage_sensor,\n", - " \"asym_voltage_sensor\": asym_voltage_sensor,\n", - " \"sym_power_sensor\": sym_power_sensor,\n", - " \"asym_power_sensor\": asym_power_sensor,\n", + " ComponentType.node: node,\n", + " ComponentType.line: line,\n", + " ComponentType.sym_load: sym_load,\n", + " ComponentType.asym_load: asym_load,\n", + " ComponentType.source: source,\n", + " ComponentType.sym_voltage_sensor: sym_voltage_sensor,\n", + " ComponentType.asym_voltage_sensor: asym_voltage_sensor,\n", + " ComponentType.sym_power_sensor: sym_power_sensor,\n", + " ComponentType.asym_power_sensor: asym_power_sensor,\n", "}" ] }, @@ -788,10 +788,10 @@ ], "source": [ "print(\"------node voltage result------\")\n", - "display(pd.DataFrame(asym_result[\"sym_voltage_sensor\"][\"u_residual\"]))\n", + "display(pd.DataFrame(asym_result[ComponentType.sym_voltage_sensor][\"u_residual\"]))\n", "\n", "print(\"------sym_load result------\")\n", - "display(pd.DataFrame(asym_result[\"sym_power_sensor\"][\"p_residual\"]))" + "display(pd.DataFrame(asym_result[ComponentType.sym_power_sensor][\"p_residual\"]))" ] }, { From 0726a90dbdd3231d509696733125ac146e90c9ac Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 16:48:19 +0200 Subject: [PATCH 31/47] Updated make test dataset example. 
Signed-off-by: Santiago Figueroa --- docs/examples/Make Test Dataset.ipynb | 16 ++++++++-------- src/power_grid_model/core/dataset_definitions.py | 2 +- src/power_grid_model/core/power_grid_dataset.py | 2 +- src/power_grid_model/utils.py | 3 ++- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/docs/examples/Make Test Dataset.ipynb b/docs/examples/Make Test Dataset.ipynb index 3b47b1356..a22c41ee3 100644 --- a/docs/examples/Make Test Dataset.ipynb +++ b/docs/examples/Make Test Dataset.ipynb @@ -167,19 +167,19 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType\n", + "from power_grid_model import LoadGenType, ComponentType, DataType\n", "from power_grid_model import PowerGridModel\n", "from power_grid_model import initialize_array\n", "\n", "# network\n", "\n", "# node\n", - "node = initialize_array(\"input\", \"node\", 3)\n", + "node = initialize_array(DataType.input, ComponentType.node, 3)\n", "node[\"id\"] = [1, 2, 6]\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(\"input\", \"line\", 3)\n", + "line = initialize_array(DataType.input, \"line\", 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -192,7 +192,7 @@ "line[\"i_n\"] = [1000, 1000, 1000]\n", "\n", "# load\n", - "sym_load = initialize_array(\"input\", \"sym_load\", 2)\n", + "sym_load = initialize_array(DataType.input, \"sym_load\", 2)\n", "sym_load[\"id\"] = [4, 7]\n", "sym_load[\"node\"] = [2, 6]\n", "sym_load[\"status\"] = [1, 1]\n", @@ -201,14 +201,14 @@ "sym_load[\"q_specified\"] = [5e6, 2e6]\n", "\n", "# source\n", - "source = initialize_array(\"input\", \"source\", 1)\n", + "source = initialize_array(DataType.input, \"source\", 1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# all\n", - "input_data = {\"node\": node, \"line\": line, \"sym_load\": sym_load, \"source\": source}" + "input_data = {ComponentType.node: node, \"line\": line, \"sym_load\": sym_load, \"source\": source}" ] }, { @@ -318,7 +318,7 @@ "pgm = PowerGridModel(imported_data)\n", "result = pgm.calculate_power_flow()\n", "\n", - "print(pd.DataFrame(result[\"node\"]))" + "print(pd.DataFrame(result[ComponentType.node]))" ] }, { @@ -340,7 +340,7 @@ "source": [ "# create batch set\n", "\n", - "load_profile = initialize_array(\"update\", \"sym_load\", (3, 2))\n", + "load_profile = initialize_array(DataType.update, \"sym_load\", (3, 2))\n", "load_profile[\"id\"] = [[4, 7]]\n", "# this is a scale of load from 0% to 100%\n", "load_profile[\"p_specified\"] = [[30e6, 15e6]] * np.linspace(0, 1, 3).reshape(-1, 1)\n", diff --git a/src/power_grid_model/core/dataset_definitions.py b/src/power_grid_model/core/dataset_definitions.py index 6e3570a38..cce94feb0 100644 --- a/src/power_grid_model/core/dataset_definitions.py +++ b/src/power_grid_model/core/dataset_definitions.py @@ -14,7 +14,7 @@ class DataType(str, Enum): """ - A DataType is the type of a :class:`BatchDataset`. + A DataType is the type of a :class:`Dataset` in power grid model. 
- Examples: diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py index 724eb6c14..30c63e484 100644 --- a/src/power_grid_model/core/power_grid_dataset.py +++ b/src/power_grid_model/core/power_grid_dataset.py @@ -197,7 +197,7 @@ def __new__( instance._mutable_dataset = MutableDatasetPtr() instance._buffer_views = [] - instance._dataset_type = dataset_type if dataset_type in DataType else get_dataset_type(data) + instance._dataset_type = dataset_type if dataset_type in list(DataType) else get_dataset_type(data) instance._schema = power_grid_meta_data[instance._dataset_type] if data: diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 034ef8500..5b60523fe 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -13,7 +13,7 @@ import numpy as np -from power_grid_model import DataType +from power_grid_model.core.dataset_definitions import DataType, _map_to_componenttypes from power_grid_model._utils import ( get_and_verify_batch_sizes as _get_and_verify_batch_sizes, get_batch_size as _get_batch_size, @@ -124,6 +124,7 @@ def json_serialize_to_file( Returns: Save to file. """ + data = _map_to_componenttypes(data) result = json_serialize( data=data, dataset_type=dataset_type, use_compact_list=use_compact_list, indent=-1 if indent is None else indent ) From 1d98e94c08c4bee6e3240e51293a9d74a97b71f6 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 17:03:42 +0200 Subject: [PATCH 32/47] Updated serialization example. Signed-off-by: Santiago Figueroa --- docs/examples/Serialization Example.ipynb | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/examples/Serialization Example.ipynb b/docs/examples/Serialization Example.ipynb index 183974d1c..ce01b1f2b 100644 --- a/docs/examples/Serialization Example.ipynb +++ b/docs/examples/Serialization Example.ipynb @@ -30,7 +30,7 @@ " # suppress warning about pyarrow as future required dependency\n", " from pandas import DataFrame\n", "\n", - "from power_grid_model import PowerGridModel\n", + "from power_grid_model import PowerGridModel, ComponentType\n", "from power_grid_model.utils import json_deserialize, json_serialize" ] }, @@ -185,7 +185,7 @@ "dataset = json_deserialize(data)\n", "\n", "print(\"components:\", dataset.keys())\n", - "display(DataFrame(dataset[\"node\"]))" + "display(DataFrame(dataset[ComponentType.node]))" ] }, { @@ -285,7 +285,7 @@ "model = PowerGridModel(dataset)\n", "output = model.calculate_power_flow()\n", "\n", - "display(DataFrame(output[\"node\"]))" + "display(DataFrame(output[ComponentType.node]))" ] }, { @@ -587,9 +587,9 @@ "output_data_compact = msgpack_deserialize(msgpack_data_compact)\n", "\n", "print(\"----Node result from not compact data----\")\n", - "print(DataFrame(output_data_not_compact[\"node\"]))\n", + "print(DataFrame(output_data_not_compact[ComponentType.node]))\n", "print(\"----Node result from compact data----\")\n", - "print(DataFrame(output_data_compact[\"node\"]))" + "print(DataFrame(output_data_compact[ComponentType.node]))" ] } ], From 500daf9293e6cd2e136d3e140a3a05638601eaac Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 17:26:01 +0200 Subject: [PATCH 33/47] Updated transformer example. 
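
For reference, batch updates in this example are now keyed by enum members
rather than bare strings. A condensed sketch of the pattern used further
down in the notebook (transformer id 3 and the two tap positions are taken
from the example itself; a real calculation additionally needs the full
input_data and model from the notebook):

    from power_grid_model import ComponentType, DataType, initialize_array

    # two scenarios, one transformer (id 3) per scenario
    transformer_update = initialize_array(DataType.update, ComponentType.transformer, (2, 1))
    transformer_update["id"] = 3
    transformer_update["tap_pos"] = [[0], [1]]

    # the batch update dataset is keyed by ComponentType, not by "transformer"
    update_data = {ComponentType.transformer: transformer_update}
    print(update_data[ComponentType.transformer]["tap_pos"])
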
Signed-off-by: Santiago Figueroa --- docs/examples/Transformer Examples.ipynb | 124 +++++++++++------------ 1 file changed, 62 insertions(+), 62 deletions(-) diff --git a/docs/examples/Transformer Examples.ipynb b/docs/examples/Transformer Examples.ipynb index 2b232e285..3fb82b7b0 100644 --- a/docs/examples/Transformer Examples.ipynb +++ b/docs/examples/Transformer Examples.ipynb @@ -44,7 +44,7 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType\n", + "from power_grid_model import LoadGenType, DataType, ComponentType\n", "from power_grid_model import (\n", " PowerGridModel,\n", " CalculationMethod,\n", @@ -77,12 +77,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(\"input\", \"node\", 2)\n", + "node = initialize_array(DataType.input, ComponentType.node, 2)\n", "node[\"id\"] = np.array([2, 4])\n", "node[\"u_rated\"] = [1e4, 4e2]\n", "\n", "# load\n", - "sym_load = initialize_array(\"input\", \"sym_load\", 1)\n", + "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 1)\n", "sym_load[\"id\"] = [5]\n", "sym_load[\"node\"] = [4]\n", "sym_load[\"status\"] = [1]\n", @@ -91,14 +91,14 @@ "sym_load[\"q_specified\"] = [5e3]\n", "\n", "# source\n", - "source = initialize_array(\"input\", \"source\", 1)\n", + "source = initialize_array(DataType.input, ComponentType.source, 1)\n", "source[\"id\"] = [1]\n", "source[\"node\"] = [2]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# transformer\n", - "transformer = initialize_array(\"input\", \"transformer\", 1)\n", + "transformer = initialize_array(DataType.input, ComponentType.transformer, 1)\n", "transformer[\"id\"] = [3]\n", "transformer[\"from_node\"] = [2]\n", "transformer[\"to_node\"] = [4]\n", @@ -121,7 +121,7 @@ "transformer[\"tap_size\"] = [100]\n", "\n", "# all\n", - "input_data = {\"node\": node, \"transformer\": transformer, \"sym_load\": sym_load, \"source\": source}" + "input_data = {ComponentType.node: node, ComponentType.transformer: transformer, ComponentType.sym_load: sym_load, ComponentType.source: source}" ] }, { @@ -156,7 +156,7 @@ } ], "source": [ - "print(pd.DataFrame(input_data[\"transformer\"]))" + "print(pd.DataFrame(input_data[ComponentType.transformer]))" ] }, { @@ -208,7 +208,7 @@ "\n", "# result dataset\n", "print(\"------node result------\")\n", - "print(pd.DataFrame(output_data[\"node\"]))" + "print(pd.DataFrame(output_data[ComponentType.node]))" ] }, { @@ -248,14 +248,14 @@ ], "source": [ "# voltage sensor\n", - "sym_voltage_sensor = initialize_array(\"input\", \"sym_voltage_sensor\", 2)\n", + "sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 2)\n", "sym_voltage_sensor[\"id\"] = [6, 7]\n", "sym_voltage_sensor[\"measured_object\"] = [2, 4]\n", "sym_voltage_sensor[\"u_sigma\"] = [1.0, 1.0]\n", "sym_voltage_sensor[\"u_measured\"] = [1e5, 4e2]\n", "\n", "# power sensor\n", - "sym_power_sensor = initialize_array(\"input\", \"sym_power_sensor\", 2)\n", + "sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 2)\n", "sym_power_sensor[\"id\"] = [8, 9]\n", "sym_power_sensor[\"measured_object\"] = [3, 3]\n", "sym_power_sensor[\"measured_terminal_type\"] = [\n", @@ -268,12 +268,12 @@ "\n", "# use components from former input dataset cell.\n", "input_data2 = {\n", - " \"node\": node,\n", - " \"transformer\": transformer,\n", - " \"sym_load\": sym_load,\n", - " \"source\": source,\n", - " \"sym_voltage_sensor\": 
sym_voltage_sensor,\n", - " \"sym_power_sensor\": sym_power_sensor,\n", + " ComponentType.node: node,\n", + " ComponentType.transformer: transformer,\n", + " ComponentType.sym_load: sym_load,\n", + " ComponentType.source: source,\n", + " ComponentType.sym_voltage_sensor: sym_voltage_sensor,\n", + " ComponentType.sym_power_sensor: sym_power_sensor,\n", "}\n", "\n", "# validation (optional)\n", @@ -291,7 +291,7 @@ "\n", "# result dataset\n", "print(\"------node result------\")\n", - "print(pd.DataFrame(output_data2[\"node\"]))" + "print(pd.DataFrame(output_data2[ComponentType.node]))" ] }, { @@ -336,12 +336,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(\"input\", \"node\", 3)\n", + "node = initialize_array(DataType.input, ComponentType.node, 3)\n", "node[\"id\"] = np.array([2, 4, 6])\n", "node[\"u_rated\"] = [1e4, 1e2, 1e2]\n", "\n", "# load\n", - "sym_load = initialize_array(\"input\", \"sym_load\", 2)\n", + "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 2)\n", "sym_load[\"id\"] = [5, 7]\n", "sym_load[\"node\"] = [4, 6]\n", "sym_load[\"status\"] = [1]\n", @@ -350,14 +350,14 @@ "sym_load[\"q_specified\"] = [5e3, 5e3]\n", "\n", "# source\n", - "source = initialize_array(\"input\", \"source\", 1)\n", + "source = initialize_array(DataType.input, ComponentType.source, 1)\n", "source[\"id\"] = [1]\n", "source[\"node\"] = [2]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# three-winding transformer\n", - "three_winding_transformer = initialize_array(\"input\", \"three_winding_transformer\", 1)\n", + "three_winding_transformer = initialize_array(DataType.input, ComponentType.three_winding_transformer, 1)\n", "three_winding_transformer[\"id\"] = [3]\n", "three_winding_transformer[\"node_1\"] = [2]\n", "three_winding_transformer[\"node_2\"] = [4]\n", @@ -393,10 +393,10 @@ "\n", "# all\n", "input_data3 = {\n", - " \"node\": node,\n", - " \"three_winding_transformer\": three_winding_transformer,\n", - " \"sym_load\": sym_load,\n", - " \"source\": source,\n", + " ComponentType.node: node,\n", + " ComponentType.three_winding_transformer: three_winding_transformer,\n", + " ComponentType.sym_load: sym_load,\n", + " ComponentType.source: source,\n", "}" ] }, @@ -447,7 +447,7 @@ "\n", "# result dataset\n", "print(\"------node result------\")\n", - "print(pd.DataFrame(output_data3[\"node\"]))" + "print(pd.DataFrame(output_data3[ComponentType.node]))" ] }, { @@ -488,14 +488,14 @@ ], "source": [ "# voltage sensor\n", - "sym_voltage_sensor = initialize_array(\"input\", \"sym_voltage_sensor\", 3)\n", + "sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 3)\n", "sym_voltage_sensor[\"id\"] = [8, 9, 10]\n", "sym_voltage_sensor[\"measured_object\"] = [2, 4, 6]\n", "sym_voltage_sensor[\"u_sigma\"] = [1.0, 1.0, 1.0]\n", "sym_voltage_sensor[\"u_measured\"] = [1e4, 1e2, 1e2]\n", "\n", "# power sensor\n", - "sym_power_sensor = initialize_array(\"input\", \"sym_power_sensor\", 3)\n", + "sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 3)\n", "sym_power_sensor[\"id\"] = [11, 12, 13]\n", "sym_power_sensor[\"measured_object\"] = [3, 3, 3]\n", "sym_power_sensor[\"measured_terminal_type\"] = [\n", @@ -509,12 +509,12 @@ "\n", "# use components from the one-time power flow calculation with three-winding transformer\n", "input_data4 = {\n", - " \"node\": node,\n", - " \"three_winding_transformer\": three_winding_transformer,\n", - " \"sym_load\": sym_load,\n", - " \"source\": source,\n", 
- " \"sym_voltage_sensor\": sym_voltage_sensor,\n", - " \"sym_power_sensor\": sym_power_sensor,\n", + " ComponentType.node: node,\n", + " ComponentType.three_winding_transformer: three_winding_transformer,\n", + " ComponentType.sym_load: sym_load,\n", + " ComponentType.source: source,\n", + " ComponentType.sym_voltage_sensor: sym_voltage_sensor,\n", + " ComponentType.sym_power_sensor: sym_power_sensor,\n", "}\n", "\n", "# validation (optional)\n", @@ -532,7 +532,7 @@ "\n", "# result dataset\n", "print(\"------three-winding transformer result------\")\n", - "print(pd.DataFrame(output_data4[\"three_winding_transformer\"]))" + "print(pd.DataFrame(output_data4[ComponentType.three_winding_transformer]))" ] }, { @@ -573,12 +573,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(\"input\", \"node\", 3)\n", + "node = initialize_array(DataType.input, ComponentType.node, 3)\n", "node[\"id\"] = [2, 4, 6]\n", "node[\"u_rated\"] = [1e4, 4e2, 4e2]\n", "\n", "# load\n", - "sym_load = initialize_array(\"input\", \"sym_load\", 1)\n", + "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 1)\n", "sym_load[\"id\"] = [7]\n", "sym_load[\"node\"] = [6]\n", "sym_load[\"status\"] = [1]\n", @@ -587,14 +587,14 @@ "sym_load[\"q_specified\"] = [5e3]\n", "\n", "# source\n", - "source = initialize_array(\"input\", \"source\", 1)\n", + "source = initialize_array(DataType.input, ComponentType.source, 1)\n", "source[\"id\"] = [1]\n", "source[\"node\"] = [2]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# line\n", - "line = initialize_array(\"input\", \"line\", 1)\n", + "line = initialize_array(DataType.input, ComponentType.line, 1)\n", "line[\"id\"] = [5]\n", "line[\"from_node\"] = [4]\n", "line[\"to_node\"] = [6]\n", @@ -606,7 +606,7 @@ "line[\"tan1\"] = [0.0]\n", "\n", "# transformer\n", - "transformer = initialize_array(\"input\", \"transformer\", 1)\n", + "transformer = initialize_array(DataType.input, ComponentType.transformer, 1)\n", "transformer[\"id\"] = [3]\n", "transformer[\"from_node\"] = [2]\n", "transformer[\"to_node\"] = [4]\n", @@ -629,7 +629,7 @@ "transformer[\"tap_size\"] = [100]\n", "\n", "# transformer tap regulator\n", - "transformer_tap_regulator = initialize_array(\"input\", \"transformer_tap_regulator\", 1)\n", + "transformer_tap_regulator = initialize_array(DataType.input, ComponentType.transformer_tap_regulator, 1)\n", "transformer_tap_regulator[\"id\"] = [8]\n", "transformer_tap_regulator[\"regulated_object\"] = [3]\n", "transformer_tap_regulator[\"status\"] = [1]\n", @@ -641,12 +641,12 @@ "\n", "# all\n", "input_data5 = {\n", - " \"node\": node,\n", - " \"line\": line,\n", - " \"transformer\": transformer,\n", - " \"sym_load\": sym_load,\n", - " \"source\": source,\n", - " \"transformer_tap_regulator\": transformer_tap_regulator,\n", + " ComponentType.node: node,\n", + " ComponentType.line: line,\n", + " ComponentType.transformer: transformer,\n", + " ComponentType.sym_load: sym_load,\n", + " ComponentType.source: source,\n", + " ComponentType.transformer_tap_regulator: transformer_tap_regulator,\n", "}" ] }, @@ -740,7 +740,7 @@ "\n", "# Both load-side nodes node_4 and node_6 have a voltage below 400 V\n", "print(\"------node result------\")\n", - "display(pd.DataFrame(output_data5[\"node\"])[[\"id\", \"u\"]])" + "display(pd.DataFrame(output_data5[ComponentType.node])[[\"id\", \"u\"]])" ] }, { @@ -883,10 +883,10 @@ "\n", "# the node at the control side of the transformer now has a voltage within the specified voltage band\n", 
"print(\"------node result------\")\n", - "display(pd.DataFrame(output_data6[\"node\"])[[\"id\", \"u\"]])\n", + "display(pd.DataFrame(output_data6[ComponentType.node])[[\"id\", \"u\"]])\n", "\n", "print(\"\\n------tap regulator result------\")\n", - "display(pd.DataFrame(output_data6[\"transformer_tap_regulator\"]))" + "display(pd.DataFrame(output_data6[ComponentType.transformer_tap_regulator]))" ] }, { @@ -1083,14 +1083,14 @@ } ], "source": [ - "transformer_update = initialize_array(\"update\", \"transformer\", (2, 1))\n", + "transformer_update = initialize_array(DataType.update, ComponentType.transformer, (2, 1))\n", "transformer_update[\"id\"] = 3\n", "transformer_update[\"tap_pos\"] = [[0], [1]]\n", "\n", - "update_data = {\"transformer\": transformer_update}\n", + "update_data = {ComponentType.transformer: transformer_update}\n", "\n", "print(\"------transformer batch update------\")\n", - "display(pd.DataFrame(update_data[\"transformer\"][:, 0]))\n", + "display(pd.DataFrame(update_data[ComponentType.transformer][:, 0]))\n", "\n", "# power flow batch calculation with automatic tap changing\n", "output_data = model5.calculate_power_flow(\n", @@ -1098,10 +1098,10 @@ ")\n", "\n", "print(\"------node_4 batch result------\")\n", - "display(pd.DataFrame(output_data[\"node\"][:, 1])[[\"id\", \"u\"]]) # only output node 1\n", + "display(pd.DataFrame(output_data[ComponentType.node][:, 1])[[\"id\", \"u\"]]) # only output node 1\n", "\n", "print(\"\\n------tap regulator batch result------\")\n", - "display(pd.DataFrame(output_data[\"transformer_tap_regulator\"][:, 0]))" + "display(pd.DataFrame(output_data[ComponentType.transformer_tap_regulator][:, 0]))" ] }, { @@ -1241,10 +1241,10 @@ "output_data5 = model5.calculate_power_flow(tap_changing_strategy=TapChangingStrategy.max_voltage_tap)\n", "\n", "print(\"------node result------\")\n", - "display(pd.DataFrame(output_data5[\"node\"])[[\"id\", \"u\"]])\n", + "display(pd.DataFrame(output_data5[ComponentType.node])[[\"id\", \"u\"]])\n", "\n", "print(\"\\n------tap regulator result------\")\n", - "display(pd.DataFrame(output_data5[\"transformer_tap_regulator\"]))" + "display(pd.DataFrame(output_data5[ComponentType.transformer_tap_regulator]))" ] }, { @@ -1384,10 +1384,10 @@ "output_data5 = model5.calculate_power_flow(tap_changing_strategy=TapChangingStrategy.min_voltage_tap)\n", "\n", "print(\"------node result------\")\n", - "display(pd.DataFrame(output_data5[\"node\"])[[\"id\", \"u\"]])\n", + "display(pd.DataFrame(output_data5[ComponentType.node])[[\"id\", \"u\"]])\n", "\n", "print(\"\\n------tap regulator result------\")\n", - "display(pd.DataFrame(output_data5[\"transformer_tap_regulator\"]))" + "display(pd.DataFrame(output_data5[ComponentType.transformer_tap_regulator]))" ] }, { @@ -1421,7 +1421,7 @@ "input_data6 = {component: component_data.copy() for component, component_data in input_data5.items()}\n", "\n", "# set the regulator's line drop compensation\n", - "input_data6[\"transformer_tap_regulator\"][\"line_drop_compensation_r\"] = [10.0]\n", + "input_data6[ComponentType.transformer_tap_regulator][\"line_drop_compensation_r\"] = [10.0]\n", "\n", "# construction\n", "model6 = PowerGridModel(input_data=input_data6)" @@ -1562,10 +1562,10 @@ "output_data6 = model6.calculate_power_flow(tap_changing_strategy=TapChangingStrategy.any_valid_tap)\n", "\n", "print(\"------node result------\")\n", - "display(pd.DataFrame(output_data6[\"node\"])[[\"id\", \"u\"]])\n", + "display(pd.DataFrame(output_data6[ComponentType.node])[[\"id\", \"u\"]])\n", "\n", 
"print(\"\\n------tap regulator result------\")\n", - "display(pd.DataFrame(output_data6[\"transformer_tap_regulator\"]))" + "display(pd.DataFrame(output_data6[ComponentType.transformer_tap_regulator]))" ] } ], From 049c54ee0074fe9a57b729476adddd320814cd85 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Tue, 25 Jun 2024 17:50:22 +0200 Subject: [PATCH 34/47] Updated validation example. Signed-off-by: Santiago Figueroa --- docs/examples/Validation Examples.ipynb | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/examples/Validation Examples.ipynb b/docs/examples/Validation Examples.ipynb index 2b2c8f126..de2188aea 100644 --- a/docs/examples/Validation Examples.ipynb +++ b/docs/examples/Validation Examples.ipynb @@ -41,14 +41,14 @@ "```python\n", "class ValidationError:\n", " \n", - " # Component(s): e.g. \"node\" or [\"node\", \"line\"]\n", - " component: Union[str, List[str]]\n", + " # Component(s): e.g. ComponentType.node or [ComponentType.node, ComponentType.line]\n", + " component: Union[ComponentType, List[ComponentType]]\n", " \n", - " # Field(s): e.g. \"id\" or [\"line_from\", \"line_to\"] or [(\"node\", \"id\"), (\"line\", \"id\")]\n", - " field: Union[str, List[str], List[Tuple[str, str]]]\n", + " # Field(s): e.g. \"id\" or [\"line_from\", \"line_to\"] or [(ComponentType.node, \"id\"), (ComponentType.line, \"id\")]\n", + " field: Union[str, List[str], List[Tuple[ComponentType, str]]]\n", "\n", - " # IDs: e.g. [1, 2, 3] or [(\"node\", 1), (\"line\", 1)]\n", - " ids: Union[List[int], List[Tuple[str, int]]] = [] \n", + " # IDs: e.g. [1, 2, 3] or [(ComponentType.node, 1), (ComponentType.line, 1)]\n", + " ids: Union[List[int], List[Tuple[ComponentType, int]]] = [] \n", " \n", "```\n", "\n", @@ -62,17 +62,17 @@ "metadata": {}, "outputs": [], "source": [ - "from power_grid_model import PowerGridModel, initialize_array\n", + "from power_grid_model import PowerGridModel, DataType, ComponentType, initialize_array\n", "\n", "# A power grid containing several errors\n", "\n", "# node\n", - "node_error = initialize_array(\"input\", \"node\", 3)\n", + "node_error = initialize_array(DataType.input, ComponentType.node, 3)\n", "node_error[\"id\"] = [1, 2, 3]\n", "node_error[\"u_rated\"] = [10.5e3]\n", "\n", "# line\n", - "line_error = initialize_array(\"input\", \"line\", 3)\n", + "line_error = initialize_array(DataType.input, ComponentType.line, 3)\n", "line_error[\"id\"] = [4, 5, 6]\n", "line_error[\"from_node\"] = [1, 2, 3]\n", "line_error[\"to_node\"] = [2, 3, 4]\n", @@ -84,7 +84,7 @@ "line_error[\"tan1\"] = [0.0]\n", "\n", "# Power Sensor\n", - "sensor_error = initialize_array(\"input\", \"sym_power_sensor\", 2)\n", + "sensor_error = initialize_array(DataType.input, ComponentType.sym_power_sensor, 2)\n", "sensor_error[\"id\"] = [6, 7]\n", "sensor_error[\"measured_object\"] = [3, 4]\n", "sensor_error[\"measured_terminal_type\"] = [0, 2]\n", @@ -92,7 +92,7 @@ "sensor_error[\"q_measured\"] = [0]\n", "sensor_error[\"power_sigma\"] = [0]\n", "\n", - "error_data = {\"node\": node_error, \"line\": line_error, \"sym_power_sensor\": sensor_error}" + "error_data = {ComponentType.node: node_error, ComponentType.line: line_error, ComponentType.sym_power_sensor: sensor_error}" ] }, { From 45f01e7acbe8a8b7ba54d170d8c7f51bf92f1817 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 26 Jun 2024 10:31:18 +0200 Subject: [PATCH 35/47] All tests pass Signed-off-by: Santiago Figueroa --- .../core/dataset_definitions.py | 4 +- 
src/power_grid_model/core/power_grid_model.py | 4 +- src/power_grid_model/core/serialization.py | 6 +- src/power_grid_model/utils.py | 3 +- src/power_grid_model/validation/utils.py | 6 +- tests/unit/test_dataset.py | 43 +++-- tests/unit/test_serialization.py | 12 +- .../unit/validation/test_input_validation.py | 177 +++++++++--------- 8 files changed, 143 insertions(+), 112 deletions(-) diff --git a/src/power_grid_model/core/dataset_definitions.py b/src/power_grid_model/core/dataset_definitions.py index cce94feb0..49bb86d7a 100644 --- a/src/power_grid_model/core/dataset_definitions.py +++ b/src/power_grid_model/core/dataset_definitions.py @@ -7,7 +7,7 @@ # This file is automatically generated. DO NOT modify it manually! from enum import Enum -from typing import Any, Dict +from typing import Any, Dict, Mapping # pylint: disable=invalid-name @@ -80,6 +80,6 @@ def _str_to_componenttype(component: Any) -> ComponentType: return component -def _map_to_componenttypes(data: Dict[Any, Any]) -> Dict[ComponentType, Any]: +def _map_to_componenttypes(data: Mapping[Any, Any]) -> Dict[ComponentType, Any]: """Helper function to map componenttype str keys to ComponentType.""" return {_str_to_componenttype(key): value for key, value in data.items()} diff --git a/src/power_grid_model/core/power_grid_model.py b/src/power_grid_model/core/power_grid_model.py index ea526ae50..508c07076 100644 --- a/src/power_grid_model/core/power_grid_model.py +++ b/src/power_grid_model/core/power_grid_model.py @@ -17,7 +17,7 @@ prepare_output_view, prepare_update_view, ) -from power_grid_model.core.dataset_definitions import ComponentType, _map_to_componenttypes, _str_to_datatype +from power_grid_model.core.dataset_definitions import ComponentType, _map_to_componenttypes, _str_to_componenttype from power_grid_model.core.error_handling import PowerGridBatchError, assert_no_error, handle_errors from power_grid_model.core.index_integer import IdNp, IdxNp from power_grid_model.core.options import Options @@ -147,7 +147,7 @@ def get_indexer(self, component_type: Any, ids: np.ndarray): Returns: Array of indexers, same shape as input array ids """ - component_type = _str_to_datatype(component_type) + component_type = _str_to_componenttype(component_type) ids_c = np.ascontiguousarray(ids, dtype=IdNp).ctypes.data_as(IDPtr) indexer = np.empty_like(ids, dtype=IdxNp, order="C") indexer_c = indexer.ctypes.data_as(IdxPtr) diff --git a/src/power_grid_model/core/serialization.py b/src/power_grid_model/core/serialization.py index 939302180..93be5be77 100644 --- a/src/power_grid_model/core/serialization.py +++ b/src/power_grid_model/core/serialization.py @@ -13,7 +13,7 @@ import numpy as np -from power_grid_model import ComponentType, DataType +from power_grid_model.core.dataset_definitions import ComponentType, DataType, _map_to_componenttypes, _str_to_datatype from power_grid_model.core.error_handling import assert_no_error from power_grid_model.core.index_integer import IdxC from power_grid_model.core.power_grid_core import ( @@ -277,6 +277,8 @@ def json_serialize( Returns: A serialized string containing the dataset. """ + data = _map_to_componenttypes(data) + dataset_type = _str_to_datatype(dataset_type) result = JsonSerializer(data=data, dataset_type=dataset_type).dump(use_compact_list=use_compact_list, indent=indent) assert_no_error() return result @@ -327,6 +329,8 @@ def msgpack_serialize( Returns: A serialized string containing the dataset. 
""" + data = _map_to_componenttypes(data) + dataset_type = _str_to_datatype(dataset_type) result = MsgpackSerializer(data=data, dataset_type=dataset_type).dump(use_compact_list=use_compact_list) assert_no_error() return result diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index 5b60523fe..ada0ad43d 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -13,11 +13,11 @@ import numpy as np -from power_grid_model.core.dataset_definitions import DataType, _map_to_componenttypes from power_grid_model._utils import ( get_and_verify_batch_sizes as _get_and_verify_batch_sizes, get_batch_size as _get_batch_size, ) +from power_grid_model.core.dataset_definitions import DataType, _map_to_componenttypes from power_grid_model.core.power_grid_dataset import get_dataset_type from power_grid_model.core.serialization import ( # pylint: disable=unused-import json_deserialize, @@ -166,6 +166,7 @@ def msgpack_serialize_to_file( Returns: Save to file. """ + data = _map_to_componenttypes(data) result = msgpack_serialize(data=data, dataset_type=dataset_type, use_compact_list=use_compact_list) with open(file_path, mode="wb") as file_pointer: diff --git a/src/power_grid_model/validation/utils.py b/src/power_grid_model/validation/utils.py index 90a167dac..dc5559b43 100644 --- a/src/power_grid_model/validation/utils.py +++ b/src/power_grid_model/validation/utils.py @@ -10,7 +10,8 @@ import numpy as np -from power_grid_model import ComponentType, DataType, power_grid_meta_data +from power_grid_model import power_grid_meta_data +from power_grid_model.core.dataset_definitions import ComponentType, DataType, _str_to_componenttype from power_grid_model.data_types import SingleDataset from power_grid_model.validation.errors import ValidationError @@ -161,10 +162,11 @@ def errors_to_string( return msg -def nan_type(component: ComponentType, field: str, data_type: DataType = DataType.input): +def nan_type(component: Union[str, ComponentType], field: str, data_type: DataType = DataType.input): """ Helper function to retrieve the nan value for a certain field as defined in the power_grid_meta_data. 
""" + component = _str_to_componenttype(component) return power_grid_meta_data[data_type][component].nans[field] diff --git a/tests/unit/test_dataset.py b/tests/unit/test_dataset.py index 8cea7518d..5b138adcb 100644 --- a/tests/unit/test_dataset.py +++ b/tests/unit/test_dataset.py @@ -7,21 +7,22 @@ import numpy as np import pytest +from power_grid_model.core.dataset_definitions import ComponentType, DataType from power_grid_model.core.power_grid_dataset import CConstDataset, get_dataset_type from power_grid_model.core.power_grid_meta import power_grid_meta_data from power_grid_model.errors import PowerGridError def input_dataset_types(): - return ["input"] + return [DataType.input] def update_dataset_types(): - return ["update"] + return [DataType.update] def output_dataset_types(): - return ["sym_output", "asym_output", "sc_output"] + return [DataType.sym_output, DataType.asym_output, DataType.sc_output] def all_dataset_types(): @@ -37,8 +38,8 @@ def test_get_dataset_type(dataset_type): assert ( get_dataset_type( data={ - "node": np.zeros(1, dtype=power_grid_meta_data[dataset_type]["node"]), - "sym_load": np.zeros(1, dtype=power_grid_meta_data[dataset_type]["sym_load"]), + ComponentType.node: np.zeros(1, dtype=power_grid_meta_data[dataset_type]["node"]), + ComponentType.sym_load: np.zeros(1, dtype=power_grid_meta_data[dataset_type]["sym_load"]), } ) == dataset_type @@ -92,7 +93,7 @@ def test_const_dataset__conflicting_data(): def test_const_dataset__single_data(dataset_type): - components = {"node": 3, "sym_load": 2, "asym_load": 4} + components = {ComponentType.node: 3, ComponentType.sym_load: 2, ComponentType.asym_load: 4} data = { component: np.zeros(shape=count, dtype=power_grid_meta_data[dataset_type][component]) for component, count in components.items() @@ -114,7 +115,7 @@ def test_const_dataset__single_data(dataset_type): @pytest.mark.parametrize("batch_size", (0, 1, 3)) def test_const_dataset__uniform_batch_data(dataset_type, batch_size): - components = {"node": 3, "sym_load": 2, "asym_load": 4} + components = {ComponentType.node: 3, ComponentType.sym_load: 2, ComponentType.asym_load: 4} data = { component: np.zeros(shape=(batch_size, count), dtype=power_grid_meta_data[dataset_type][component]) for component, count in components.items() @@ -136,21 +137,21 @@ def test_const_dataset__uniform_batch_data(dataset_type, batch_size): def test_const_dataset__sparse_batch_data(dataset_type): batch_size = 3 - components = {"node": 3, "sym_load": 2, "asym_load": 4, "link": 4} + components = {ComponentType.node: 3, ComponentType.sym_load: 2, ComponentType.asym_load: 4, ComponentType.link: 4} data = { - "node": { + ComponentType.node: { "data": np.zeros(shape=3, dtype=power_grid_meta_data[dataset_type]["node"]), "indptr": np.array([0, 2, 3, 3]), }, - "sym_load": { + ComponentType.sym_load: { "data": np.zeros(shape=2, dtype=power_grid_meta_data[dataset_type]["sym_load"]), "indptr": np.array([0, 0, 1, 2]), }, - "asym_load": { + ComponentType.asym_load: { "data": np.zeros(shape=4, dtype=power_grid_meta_data[dataset_type]["asym_load"]), "indptr": np.array([0, 2, 3, 4]), }, - "link": np.zeros(shape=(batch_size, 4), dtype=power_grid_meta_data[dataset_type]["link"]), + ComponentType.link: np.zeros(shape=(batch_size, 4), dtype=power_grid_meta_data[dataset_type]["link"]), } dataset = CConstDataset(data, dataset_type) @@ -163,14 +164,24 @@ def test_const_dataset__sparse_batch_data(dataset_type): assert info.batch_size() == 3 assert info.n_components() == len(components) assert info.components() == 
list(components) - assert info.elements_per_scenario() == {"node": -1, "sym_load": -1, "asym_load": -1, "link": 4} - assert info.total_elements() == {"node": 3, "sym_load": 2, "asym_load": 4, "link": batch_size * 4} + assert info.elements_per_scenario() == { + ComponentType.node: -1, + ComponentType.sym_load: -1, + ComponentType.asym_load: -1, + ComponentType.link: 4, + } + assert info.total_elements() == { + ComponentType.node: 3, + ComponentType.sym_load: 2, + ComponentType.asym_load: 4, + ComponentType.link: batch_size * 4, + } def test_const_dataset__mixed_batch_size(dataset_type): data = { - "node": np.zeros(shape=(2, 3), dtype=power_grid_meta_data[dataset_type]["node"]), - "line": np.zeros(shape=(3, 3), dtype=power_grid_meta_data[dataset_type]["line"]), + ComponentType.node: np.zeros(shape=(2, 3), dtype=power_grid_meta_data[dataset_type]["node"]), + ComponentType.line: np.zeros(shape=(3, 3), dtype=power_grid_meta_data[dataset_type]["line"]), } with pytest.raises(ValueError): CConstDataset(data, dataset_type) diff --git a/tests/unit/test_serialization.py b/tests/unit/test_serialization.py index 05115bdf1..c2f5c48a8 100644 --- a/tests/unit/test_serialization.py +++ b/tests/unit/test_serialization.py @@ -9,7 +9,7 @@ import numpy as np import pytest -from power_grid_model import ComponentType +from power_grid_model import ComponentType, DataType from power_grid_model.core.power_grid_dataset import get_dataset_type from power_grid_model.utils import json_deserialize, json_serialize, msgpack_deserialize, msgpack_serialize @@ -33,7 +33,7 @@ def from_msgpack(data): return msgpack.unpackb(data) -def empty_dataset(dataset_type: str = "input"): +def empty_dataset(dataset_type: str = DataType.input): return {"version": "1.0", "type": dataset_type, "is_batch": False, "attributes": {}, "data": {}} @@ -369,7 +369,9 @@ def test_msgpack_deserialize_data(serialized_data): assert result_type == serialized_data["type"] -@pytest.mark.parametrize("dataset_type", ("input", "update", "sym_output", "asym_output", "sc_output")) +@pytest.mark.parametrize( + "dataset_type", (DataType.input, DataType.update, DataType.sym_output, DataType.asym_output, DataType.sc_output) +) @pytest.mark.parametrize("use_compact_list", (True, False)) def test_json_serialize_empty_dataset(dataset_type, use_compact_list: bool): for indent in (-1, 0, 2, 4): @@ -384,7 +386,9 @@ def test_json_serialize_empty_dataset(dataset_type, use_compact_list: bool): json_serialize({}, use_compact_list=use_compact_list, indent=indent) -@pytest.mark.parametrize("dataset_type", ("input", "update", "sym_output", "asym_output", "sc_output")) +@pytest.mark.parametrize( + "dataset_type", (DataType.input, DataType.update, DataType.sym_output, DataType.asym_output, DataType.sc_output) +) def test_msgpack_serialize_empty_dataset(dataset_type): reference = empty_dataset(dataset_type) for use_compact_list in (True, False): diff --git a/tests/unit/validation/test_input_validation.py b/tests/unit/validation/test_input_validation.py index f1e809e9f..c8c84003d 100644 --- a/tests/unit/validation/test_input_validation.py +++ b/tests/unit/validation/test_input_validation.py @@ -7,7 +7,16 @@ import numpy as np import pytest -from power_grid_model import Branch3Side, BranchSide, LoadGenType, MeasuredTerminalType, WindingType, initialize_array +from power_grid_model import ( + Branch3Side, + BranchSide, + ComponentType, + DataType, + LoadGenType, + MeasuredTerminalType, + WindingType, + initialize_array, +) from power_grid_model.enum import CalculationType, FaultPhase, 
FaultType from power_grid_model.validation import validate_input_data from power_grid_model.validation.errors import ( @@ -31,12 +40,12 @@ @pytest.fixture -def input_data() -> Dict[str, np.ndarray]: - node = initialize_array("input", "node", 4) +def input_data() -> Dict[ComponentType, np.ndarray]: + node = initialize_array(DataType.input, ComponentType.node, 4) node["id"] = [0, 2, 1, 2] node["u_rated"] = [10.5e3, 10.5e3, 0, 10.5e3] - line = initialize_array("input", "line", 3) + line = initialize_array(DataType.input, ComponentType.line, 3) line["id"] = [3, 4, 5] line["from_node"] = [0, -1, 2] line["to_node"] = [2, 1, 8] @@ -48,14 +57,14 @@ def input_data() -> Dict[str, np.ndarray]: line["x0"] = [0, 0, 50] line["i_n"] = [-3, 0, 50] - link = initialize_array("input", "link", 2) + link = initialize_array(DataType.input, ComponentType.link, 2) link["id"] = [12, 13] link["from_node"] = [0, -1] link["to_node"] = [8, 1] link["from_status"] = [3, 1] link["to_status"] = [0, 4] - transformer = initialize_array("input", "transformer", 3) + transformer = initialize_array(DataType.input, ComponentType.transformer, 3) transformer["id"] = [1, 14, 15] transformer["from_node"] = [1, 7, 2] # TODO check from node 1 to node 1 transformer["to_node"] = [1, 8, 1] @@ -82,7 +91,7 @@ def input_data() -> Dict[str, np.ndarray]: transformer["pk_min"] = [300.0, 0.0, nan_type("transformer", "pk_min")] transformer["pk_max"] = [400.0, -0.1, nan_type("transformer", "pk_max")] - three_winding_transformer = initialize_array("input", "three_winding_transformer", 4) + three_winding_transformer = initialize_array(DataType.input, ComponentType.three_winding_transformer, 4) three_winding_transformer["id"] = [1, 28, 29, 30] three_winding_transformer["node_1"] = [0, 1, 9, 2] three_winding_transformer["node_2"] = [1, 15, 1, 0] @@ -158,7 +167,7 @@ def input_data() -> Dict[str, np.ndarray]: three_winding_transformer["pk_13_max"] = [-40, nan_type("three_winding_transformer", "pk_12_max"), 40, 50] three_winding_transformer["pk_23_max"] = [-120, nan_type("three_winding_transformer", "pk_12_max"), 40, 30] - transformer_tap_regulator = initialize_array("input", "transformer_tap_regulator", 5) + transformer_tap_regulator = initialize_array(DataType.input, ComponentType.transformer_tap_regulator, 5) transformer_tap_regulator["id"] = [51, 52, 53, 54, 1] transformer_tap_regulator["status"] = [0, -1, 2, 1, 5] transformer_tap_regulator["regulated_object"] = [14, 15, 28, 14, 2] @@ -168,7 +177,7 @@ def input_data() -> Dict[str, np.ndarray]: transformer_tap_regulator["line_drop_compensation_r"] = [0.0, -1.0, 1.0, 0.0, 2.0] transformer_tap_regulator["line_drop_compensation_x"] = [0.0, 4.0, 2.0, 0.0, -4.0] - source = initialize_array("input", "source", 3) + source = initialize_array(DataType.input, ComponentType.source, 3) source["id"] = [16, 17, 1] source["node"] = [10, 1, 2] source["status"] = [0, -1, 2] @@ -177,42 +186,42 @@ def input_data() -> Dict[str, np.ndarray]: source["rx_ratio"] = [0.0, -30.0, 300.0] source["z01_ratio"] = [-1.0, 0.0, 200.0] - shunt = initialize_array("input", "shunt", 3) + shunt = initialize_array(DataType.input, ComponentType.shunt, 3) shunt["id"] = [18, 19, 1] shunt["node"] = [10, 1, 2] shunt["status"] = [0, -1, 2] - sym_load = initialize_array("input", "sym_load", 3) + sym_load = initialize_array(DataType.input, ComponentType.sym_load, 3) sym_load["id"] = [1, 20, 21] sym_load["type"] = [1, 0, 5] sym_load["node"] = [10, 1, 2] sym_load["status"] = [0, -1, 2] - sym_gen = initialize_array("input", "sym_gen", 3) + sym_gen = 
initialize_array(DataType.input, ComponentType.sym_gen, 3) sym_gen["id"] = [1, 22, 23] sym_gen["type"] = [2, -1, 1] sym_gen["node"] = [10, 1, 2] sym_gen["status"] = [0, -1, 2] - asym_load = initialize_array("input", "asym_load", 3) + asym_load = initialize_array(DataType.input, ComponentType.asym_load, 3) asym_load["id"] = [1, 24, 25] asym_load["type"] = [5, 0, 2] asym_load["node"] = [10, 1, 2] asym_load["status"] = [0, -1, 2] - asym_gen = initialize_array("input", "asym_gen", 3) + asym_gen = initialize_array(DataType.input, ComponentType.asym_gen, 3) asym_gen["id"] = [1, 26, 27] asym_gen["type"] = [-1, 5, 2] asym_gen["node"] = [10, 1, 2] asym_gen["status"] = [0, -1, 2] - sym_voltage_sensor = initialize_array("input", "sym_voltage_sensor", 4) + sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 4) sym_voltage_sensor["id"] = [7, 8, 9, 10] sym_voltage_sensor["measured_object"] = [2, 3, 1, 200] sym_voltage_sensor["u_measured"] = [0.0, 10.4e3, 10.6e3, -20.0] sym_voltage_sensor["u_sigma"] = [1.0, np.nan, 0.0, -1.0] - asym_voltage_sensor = initialize_array("input", "asym_voltage_sensor", 4) + asym_voltage_sensor = initialize_array(DataType.input, ComponentType.asym_voltage_sensor, 4) asym_voltage_sensor["id"] = [7, 8, 9, 10] asym_voltage_sensor["measured_object"] = [2, 3, 1, 200] asym_voltage_sensor["u_measured"] = [ @@ -223,19 +232,19 @@ def input_data() -> Dict[str, np.ndarray]: ] asym_voltage_sensor["u_sigma"] = [1.0, np.nan, 0.0, -1.0] - sym_power_sensor = initialize_array("input", "sym_power_sensor", 4) + sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 4) sym_power_sensor["id"] = [7, 8, 9, 10] sym_power_sensor["measured_object"] = [12, 3, 13, 200] sym_power_sensor["power_sigma"] = [1.0, np.nan, 0.0, -1.0] sym_power_sensor["measured_terminal_type"] = [1, 1, 10, 1] - asym_power_sensor = initialize_array("input", "asym_power_sensor", 4) + asym_power_sensor = initialize_array(DataType.input, ComponentType.asym_power_sensor, 4) asym_power_sensor["id"] = [7, 8, 9, 10] asym_power_sensor["measured_object"] = [12, 3, 13, 200] asym_power_sensor["power_sigma"] = [1.0, np.nan, 0.0, -1.0] asym_power_sensor["measured_terminal_type"] = [1, 1, 10, 1] - fault = initialize_array("input", "fault", 20) + fault = initialize_array(DataType.input, ComponentType.fault, 20) fault["id"] = [1] + list(range(32, 51)) fault["status"] = [0, -1, 2] + 17 * [1] fault["fault_type"] = 6 * [0] + 4 * [1] + 4 * [2] + 4 * [3] + [nan_type("fault", "fault_type"), 4] @@ -245,23 +254,23 @@ def input_data() -> Dict[str, np.ndarray]: fault["x_f"] = [-1.0, 0.0, 1.0] + 17 * [nan_type("fault", "x_f")] data = { - "node": node, - "line": line, - "link": link, - "transformer": transformer, - "three_winding_transformer": three_winding_transformer, - "transformer_tap_regulator": transformer_tap_regulator, - "source": source, - "shunt": shunt, - "sym_load": sym_load, - "sym_gen": sym_gen, - "asym_load": asym_load, - "asym_gen": asym_gen, - "sym_voltage_sensor": sym_voltage_sensor, - "asym_voltage_sensor": asym_voltage_sensor, - "sym_power_sensor": sym_power_sensor, - "asym_power_sensor": asym_power_sensor, - "fault": fault, + ComponentType.node: node, + ComponentType.line: line, + ComponentType.link: link, + ComponentType.transformer: transformer, + ComponentType.three_winding_transformer: three_winding_transformer, + ComponentType.transformer_tap_regulator: transformer_tap_regulator, + ComponentType.source: source, + ComponentType.shunt: shunt, + ComponentType.sym_load: 
sym_load, + ComponentType.sym_gen: sym_gen, + ComponentType.asym_load: asym_load, + ComponentType.asym_gen: asym_gen, + ComponentType.sym_voltage_sensor: sym_voltage_sensor, + ComponentType.asym_voltage_sensor: asym_voltage_sensor, + ComponentType.sym_power_sensor: sym_power_sensor, + ComponentType.asym_power_sensor: asym_power_sensor, + ComponentType.fault: fault, } return data @@ -272,50 +281,50 @@ def test_validate_input_data_sym_calculation(input_data): assert ( MultiComponentNotUniqueError( [ - ("asym_gen", "id"), - ("asym_load", "id"), - ("asym_power_sensor", "id"), - ("asym_voltage_sensor", "id"), - ("node", "id"), - ("shunt", "id"), - ("source", "id"), - ("sym_gen", "id"), - ("sym_load", "id"), - ("sym_power_sensor", "id"), - ("sym_voltage_sensor", "id"), - ("transformer", "id"), - ("three_winding_transformer", "id"), - ("fault", "id"), - ("transformer_tap_regulator", "id"), + (ComponentType.asym_gen, "id"), + (ComponentType.asym_load, "id"), + (ComponentType.asym_power_sensor, "id"), + (ComponentType.asym_voltage_sensor, "id"), + (ComponentType.node, "id"), + (ComponentType.shunt, "id"), + (ComponentType.source, "id"), + (ComponentType.sym_gen, "id"), + (ComponentType.sym_load, "id"), + (ComponentType.sym_power_sensor, "id"), + (ComponentType.sym_voltage_sensor, "id"), + (ComponentType.transformer, "id"), + (ComponentType.three_winding_transformer, "id"), + (ComponentType.fault, "id"), + (ComponentType.transformer_tap_regulator, "id"), ], [ - ("asym_gen", 1), - ("asym_load", 1), - ("asym_power_sensor", 7), - ("asym_power_sensor", 8), - ("asym_power_sensor", 9), - ("asym_power_sensor", 10), - ("asym_voltage_sensor", 7), - ("asym_voltage_sensor", 8), - ("asym_voltage_sensor", 9), - ("asym_voltage_sensor", 10), - ("node", 1), - ("shunt", 1), - ("source", 1), - ("sym_gen", 1), - ("sym_load", 1), - ("sym_power_sensor", 7), - ("sym_power_sensor", 8), - ("sym_power_sensor", 9), - ("sym_power_sensor", 10), - ("sym_voltage_sensor", 7), - ("sym_voltage_sensor", 8), - ("sym_voltage_sensor", 9), - ("sym_voltage_sensor", 10), - ("transformer", 1), - ("three_winding_transformer", 1), - ("fault", 1), - ("transformer_tap_regulator", 1), + (ComponentType.asym_gen, 1), + (ComponentType.asym_load, 1), + (ComponentType.asym_power_sensor, 7), + (ComponentType.asym_power_sensor, 8), + (ComponentType.asym_power_sensor, 9), + (ComponentType.asym_power_sensor, 10), + (ComponentType.asym_voltage_sensor, 7), + (ComponentType.asym_voltage_sensor, 8), + (ComponentType.asym_voltage_sensor, 9), + (ComponentType.asym_voltage_sensor, 10), + (ComponentType.node, 1), + (ComponentType.shunt, 1), + (ComponentType.source, 1), + (ComponentType.sym_gen, 1), + (ComponentType.sym_load, 1), + (ComponentType.sym_power_sensor, 7), + (ComponentType.sym_power_sensor, 8), + (ComponentType.sym_power_sensor, 9), + (ComponentType.sym_power_sensor, 10), + (ComponentType.sym_voltage_sensor, 7), + (ComponentType.sym_voltage_sensor, 8), + (ComponentType.sym_voltage_sensor, 9), + (ComponentType.sym_voltage_sensor, 10), + (ComponentType.transformer, 1), + (ComponentType.three_winding_transformer, 1), + (ComponentType.fault, 1), + (ComponentType.transformer_tap_regulator, 1), ], ) in validation_errors @@ -600,13 +609,13 @@ def test_validate_input_data_transformer_tap_regulator(input_data): def test_fault(input_data): validation_errors = validate_input_data(input_data, calculation_type=CalculationType.short_circuit) - assert InvalidEnumValueError("fault", "fault_type", [50], FaultType) in validation_errors - assert 
InvalidEnumValueError("fault", "fault_phase", [50], FaultPhase) in validation_errors - assert FaultPhaseError("fault", ("fault_type", "fault_phase"), [1] + list(range(32, 51))) - assert NotGreaterOrEqualError("fault", "r_f", [1], 0) in validation_errors + assert InvalidEnumValueError(ComponentType.fault, "fault_type", [50], FaultType) in validation_errors + assert InvalidEnumValueError(ComponentType.fault, "fault_phase", [50], FaultPhase) in validation_errors + assert FaultPhaseError(ComponentType.fault, ("fault_type", "fault_phase"), [1] + list(range(32, 51))) + assert NotGreaterOrEqualError(ComponentType.fault, "r_f", [1], 0) in validation_errors assert ( NotIdenticalError( - "fault", + ComponentType.fault, "fault_type", list(range(32, 51)), 5 * [0] + 4 * [1] + 4 * [2] + 4 * [3] + [nan_type("fault", "fault_type"), 4], @@ -615,7 +624,7 @@ def test_fault(input_data): ) assert ( NotIdenticalError( - "fault", + ComponentType.fault, "fault_phase", list(range(32, 51)), list(range(2, 7)) + [0, 4, 5, 6] + 2 * list(range(4)) + [nan_type("fault", "fault_phase"), 7], From 20dcf4a7ed46ab976e74d614a918a230f12eee9e Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 26 Jun 2024 13:47:23 +0200 Subject: [PATCH 36/47] Review comments resolved Signed-off-by: Santiago Figueroa --- code_generation/code_gen.py | 28 +---------- .../core/metadata_enums.py.jinja | 47 +++++++++++-------- .../core/dataset_definitions.py | 3 +- .../core/power_grid_dataset.py | 7 +-- src/power_grid_model/core/power_grid_model.py | 15 +++--- src/power_grid_model/core/serialization.py | 9 ++-- src/power_grid_model/validation/errors.py | 2 +- src/power_grid_model/validation/utils.py | 2 +- 8 files changed, 49 insertions(+), 64 deletions(-) diff --git a/code_generation/code_gen.py b/code_generation/code_gen.py index 7bf8fa33c..109182fa0 100644 --- a/code_generation/code_gen.py +++ b/code_generation/code_gen.py @@ -114,39 +114,13 @@ def render_dataset_class_maps(self, template_path: Path, data_path: Path, output for component_name in component.names: all_components[component_name] = [x.names for x in class_def.full_attributes] all_map[f"{prefix}{dataset.name}"] = all_components - self.render_template(template_path=template_path, output_path=output_path, all_map=all_map) - def render_metadata_types(self, template_path: Path, data_path: Path, output_path: Path): - with open(data_path) as data_file: - json_data = data_file.read() - dataset_meta_data: List[DatasetMapData] = AllDatasetMapData.schema().loads(json_data).all_datasets - - dataset_types = [] - components = [] - for dataset in dataset_meta_data: - if dataset.is_template: - prefixes = ["sym_", "asym_"] - else: - prefixes = [""] - for prefix in prefixes: - dataset_types.append(f"{prefix}{dataset.name}") - - if dataset.name == "input": - for component in dataset.components: - components.append(component.names) - - components = [name for sublist in components for name in sublist] - - self.render_template( - template_path=template_path, output_path=output_path, dataset_types=dataset_types, components=components - ) - def code_gen(self): render_funcs = { "attribute_classes": self.render_attribute_classes, "dataset_class_maps": self.render_dataset_class_maps, - "metadata_enums": self.render_metadata_types, + "metadata_enums": self.render_dataset_class_maps, } # render attribute classes diff --git a/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja index 4eca15897..fa2b0d40b 
100644 --- a/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja @@ -6,17 +6,20 @@ # This file is automatically generated. DO NOT modify it manually! +{% set dataset_types = all_map.keys() %} +{% set components = all_map['input'].keys() %} + from enum import Enum -from typing import Any, Dict +from typing import Any, Dict, Mapping # pylint: disable=invalid-name -class DataType(Enum): +class DataType(str, Enum): """ - A DataType is the type of a :class:`BatchDataset`. + A DataType is the type of a :class:`Dataset` in power grid model. - - Examples: + - Examples: - DataType.input = "input" - DataType.update = "update" @@ -27,11 +30,11 @@ class DataType(Enum): {%- endfor %} -class ComponentType(Enum): +class ComponentType(str, Enum): """ A ComponentType is the type of a grid component. - - Examples: + - Examples: - ComponentType.node = "node" - ComponentType.line = "line" @@ -45,21 +48,25 @@ class ComponentType(Enum): # pylint: enable=invalid-name -def _map_to_datatypes(data: Dict[Any, Any]) -> Dict[DataType, Any]: +def _str_to_datatype(data_type: Any) -> DataType: + """Helper function to transform data_type str to DataType.""" + if isinstance(data_type, str): + return DataType[data_type] + return data_type + + +def _map_to_datatypes(data: Mapping[Any, Any]) -> Dict[DataType, Any]: """Helper function to map datatype str keys to DataType.""" - def map_keys(key: Any): - if isinstance(key, str): - return DataType[key] - return key - - return {map_keys(key): value for key, value in data.items()} + return {_str_to_datatype(key): value for key, value in data.items()} + + +def _str_to_componenttype(component: Any) -> ComponentType: + """Helper function to transform component str to ComponentType.""" + if isinstance(component, str): + return ComponentType[component] + return component -def _map_to_componenttypes(data: Dict[Any, Any]) -> Dict[ComponentType, Any]: +def _map_to_componenttypes(data: Mapping[Any, Any]) -> Dict[ComponentType, Any]: """Helper function to map componenttype str keys to ComponentType.""" - def map_keys(key: Any): - if isinstance(key, str): - return ComponentType[key] - return key - - return {map_keys(key): value for key, value in data.items()} + return {_str_to_componenttype(key): value for key, value in data.items()} diff --git a/src/power_grid_model/core/dataset_definitions.py b/src/power_grid_model/core/dataset_definitions.py index 49bb86d7a..4e81ced1a 100644 --- a/src/power_grid_model/core/dataset_definitions.py +++ b/src/power_grid_model/core/dataset_definitions.py @@ -6,6 +6,7 @@ # This file is automatically generated. DO NOT modify it manually! 
+ from enum import Enum from typing import Any, Dict, Mapping @@ -68,7 +69,7 @@ def _str_to_datatype(data_type: Any) -> DataType: return data_type -def _map_to_datatypes(data: Dict[Any, Any]) -> Dict[DataType, Any]: +def _map_to_datatypes(data: Mapping[Any, Any]) -> Dict[DataType, Any]: """Helper function to map datatype str keys to DataType.""" return {_str_to_datatype(key): value for key, value in data.items()} diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py index 30c63e484..eb9663361 100644 --- a/src/power_grid_model/core/power_grid_dataset.py +++ b/src/power_grid_model/core/power_grid_dataset.py @@ -6,7 +6,7 @@ Power grid model raw dataset handler """ -from typing import Any, Dict, List, Mapping, Optional, Union +from typing import Any, List, Mapping, Optional, Union import numpy as np @@ -27,6 +27,7 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_meta import DatasetMetaData, power_grid_meta_data +from power_grid_model.data_types import Dataset from power_grid_model.errors import PowerGridError @@ -392,7 +393,7 @@ def __init__(self, dataset_ptr: WritableDatasetPtr): self._schema = power_grid_meta_data[self._dataset_type] self._component_buffer_properties = self._get_buffer_properties(info) - self._data: Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]] = {} + self._data: Dataset = {} self._buffers: Mapping[str, CBuffer] = {} self._add_buffers() @@ -416,7 +417,7 @@ def get_info(self) -> CDatasetInfo: """ return CDatasetInfo(pgc.dataset_writable_get_info(self._writable_dataset)) - def get_data(self) -> Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]: + def get_data(self) -> Dataset: """ Retrieve data from the Power Grid Model dataset. diff --git a/src/power_grid_model/core/power_grid_model.py b/src/power_grid_model/core/power_grid_model.py index 508c07076..77383f8d0 100644 --- a/src/power_grid_model/core/power_grid_model.py +++ b/src/power_grid_model/core/power_grid_model.py @@ -22,6 +22,7 @@ from power_grid_model.core.index_integer import IdNp, IdxNp from power_grid_model.core.options import Options from power_grid_model.core.power_grid_core import ConstDatasetPtr, IDPtr, IdxPtr, ModelPtr, power_grid_core as pgc +from power_grid_model.data_types import Dataset from power_grid_model.enum import ( CalculationMethod, CalculationType, @@ -234,7 +235,7 @@ def _calculate_impl( self, calculation_type: CalculationType, symmetric: bool, - update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]], + update_data: Optional[Dataset], output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]], options: Options, continue_on_batch_error: bool, @@ -299,7 +300,7 @@ def _calculate_power_flow( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.newton_raphson, - update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dataset] = None, threading: int = -1, output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, @@ -335,7 +336,7 @@ def _calculate_state_estimation( error_tolerance: float = 1e-8, max_iterations: int = 20, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iterative_linear, - update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dataset] = None, threading: 
int = -1, output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, @@ -366,7 +367,7 @@ def _calculate_short_circuit( self, *, calculation_method: Union[CalculationMethod, str] = CalculationMethod.iec60909, - update_data: Optional[Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]] = None, + update_data: Optional[Dataset] = None, threading: int = -1, output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, @@ -405,7 +406,7 @@ def calculate_power_flow( update_data: Optional[ Union[ Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]], - Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]], + Dataset, ] ] = None, threading: int = -1, @@ -499,7 +500,7 @@ def calculate_state_estimation( update_data: Optional[ Union[ Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]], - Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]], + Dataset, ] ] = None, threading: int = -1, @@ -585,7 +586,7 @@ def calculate_short_circuit( update_data: Optional[ Union[ Dict[str, Union[np.ndarray, Dict[str, np.ndarray]]], - Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]], + Dataset, ] ] = None, threading: int = -1, diff --git a/src/power_grid_model/core/serialization.py b/src/power_grid_model/core/serialization.py index 93be5be77..28c3bc1d0 100644 --- a/src/power_grid_model/core/serialization.py +++ b/src/power_grid_model/core/serialization.py @@ -9,7 +9,7 @@ from abc import ABC, abstractmethod from ctypes import byref from enum import IntEnum -from typing import Dict, Mapping, Optional, Union +from typing import Mapping, Optional, Union import numpy as np @@ -24,6 +24,7 @@ power_grid_core as pgc, ) from power_grid_model.core.power_grid_dataset import CConstDataset, CWritableDataset +from power_grid_model.data_types import Dataset from power_grid_model.errors import PowerGridSerializationError @@ -64,7 +65,7 @@ def __del__(self): if hasattr(self, "_deserializer"): pgc.destroy_deserializer(self._deserializer) - def load(self) -> Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]: + def load(self) -> Dataset: """ Load the deserialized data to a new dataset. @@ -229,7 +230,7 @@ def __new__( return super().__new__(cls, data, SerializationType.MSGPACK, dataset_type=dataset_type) -def json_deserialize(data: Union[str, bytes]) -> Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]: +def json_deserialize(data: Union[str, bytes]) -> Dataset: """ Load serialized JSON data to a new dataset. @@ -284,7 +285,7 @@ def json_serialize( return result -def msgpack_deserialize(data: bytes) -> Dict[ComponentType, Union[np.ndarray, Dict[str, np.ndarray]]]: +def msgpack_deserialize(data: bytes) -> Dataset: """ Load serialized msgpack data to a new dataset. 
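
[Note: the serialization hunks above retype the deserializers to return `Dataset`, whose keys are `ComponentType` members rather than plain strings. Below is a minimal sketch of the resulting caller-side behavior, not part of the patch itself; it assumes `json_deserialize` stays re-exported via `power_grid_model.utils`, relies on the `str` mixin that this patch adds to the enums, and uses a hypothetical one-node payload in the serialized JSON format.]

```python
from power_grid_model import ComponentType
from power_grid_model.utils import json_deserialize  # assumed public re-export

# Hypothetical one-node input dataset in the serialized JSON format.
raw = (
    '{"version": "1.0", "type": "input", "is_batch": false, '
    '"attributes": {}, "data": {"node": [{"id": 1, "u_rated": 10500.0}]}}'
)
input_data = json_deserialize(raw)

# Keys come back as ComponentType members ...
assert ComponentType.node in input_data
# ... and, because ComponentType mixes in str, string lookups still resolve
# to the very same numpy structured array.
assert input_data["node"] is input_data[ComponentType.node]
print(input_data[ComponentType.node]["u_rated"])  # [10500.]
```

Because the enum members hash and compare equal to their string values, existing string-keyed user code keeps working unchanged; this is the compatibility property the `str`/`Enum` mixin is meant to guarantee.
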
diff --git a/src/power_grid_model/validation/errors.py b/src/power_grid_model/validation/errors.py index a7ff2dbe4..cfc2415c6 100644 --- a/src/power_grid_model/validation/errors.py +++ b/src/power_grid_model/validation/errors.py @@ -332,7 +332,7 @@ def __init__( ): # pylint: disable=too-many-arguments super().__init__(component=component, field=field, ids=ids) - self.ref_components = [ref_components] if isinstance(ref_components, str) else ref_components # type: ignore + self.ref_components = [ref_components] if isinstance(ref_components, (str, ComponentType)) else ref_components self.filters = filters if filters else None @property diff --git a/src/power_grid_model/validation/utils.py b/src/power_grid_model/validation/utils.py index dc5559b43..d5e4cd90f 100644 --- a/src/power_grid_model/validation/utils.py +++ b/src/power_grid_model/validation/utils.py @@ -247,7 +247,7 @@ def get_valid_ids(data: SingleDataset, ref_components: Union[ComponentType, List """ # For convenience, ref_component may be a string and we'll convert it to a 'list' containing that string as it's # single element. - if not isinstance(ref_components, list): + if isinstance(ref_components, (str, ComponentType)): ref_components = [ref_components] # Create a set of ids by chaining the ids of all ref_components From e900f83cd5cb0a565baf456325dbaef130c93bcc Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 26 Jun 2024 16:51:10 +0200 Subject: [PATCH 37/47] Fixed cyclic import errors. Signed-off-by: Santiago Figueroa --- src/power_grid_model/core/buffer_handling.py | 2 +- src/power_grid_model/core/data_handling.py | 2 +- src/power_grid_model/data_types.py | 2 +- tests/unit/utils.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/power_grid_model/core/buffer_handling.py b/src/power_grid_model/core/buffer_handling.py index 66f7564ad..82e69581f 100644 --- a/src/power_grid_model/core/buffer_handling.py +++ b/src/power_grid_model/core/buffer_handling.py @@ -12,7 +12,7 @@ import numpy as np -from power_grid_model import ComponentType +from power_grid_model.core.dataset_definitions import ComponentType from power_grid_model.core.error_handling import VALIDATOR_MSG from power_grid_model.core.index_integer import IdxC, IdxNp from power_grid_model.core.power_grid_core import IdxPtr, VoidPtr diff --git a/src/power_grid_model/core/data_handling.py b/src/power_grid_model/core/data_handling.py index 2a60a5314..84fe5859f 100644 --- a/src/power_grid_model/core/data_handling.py +++ b/src/power_grid_model/core/data_handling.py @@ -12,7 +12,7 @@ import numpy as np -from power_grid_model import ComponentType, DataType +from power_grid_model.core.dataset_definitions import ComponentType, DataType from power_grid_model.core.power_grid_dataset import CConstDataset, CMutableDataset from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data from power_grid_model.enum import CalculationType diff --git a/src/power_grid_model/data_types.py b/src/power_grid_model/data_types.py index a98cc221d..4ad7f43f4 100644 --- a/src/power_grid_model/data_types.py +++ b/src/power_grid_model/data_types.py @@ -11,7 +11,7 @@ import numpy as np -from power_grid_model import ComponentType +from power_grid_model.core.dataset_definitions import ComponentType # When we're dropping python 3.8, we should introduce proper NumPy type hinting diff --git a/tests/unit/utils.py b/tests/unit/utils.py index ebc33be38..404bc0db7 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -11,7 +11,7 @@ import numpy as np 
import pytest -from power_grid_model import DataType +from power_grid_model.core.dataset_definitions import DataType from power_grid_model.core.power_grid_model import PowerGridModel from power_grid_model.data_types import Dataset, PythonDataset, SingleDataset from power_grid_model.errors import ( From c2b087468a7e613fd8a1b92954b28044d0994dfa Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Wed, 26 Jun 2024 17:12:03 +0200 Subject: [PATCH 38/47] Docs and user comments. Signed-off-by: Santiago Figueroa --- docs/advanced_documentation/native-data-interface.md | 10 ++++++---- scripts/quick_example.py | 2 +- scripts/quick_example_batch.py | 2 +- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/docs/advanced_documentation/native-data-interface.md b/docs/advanced_documentation/native-data-interface.md index eed5e7de4..48d6f60d9 100644 --- a/docs/advanced_documentation/native-data-interface.md +++ b/docs/advanced_documentation/native-data-interface.md @@ -98,10 +98,11 @@ For example, the following update dataset will set `from_status` of the line #5 and keep the `to_status` as unchanged (change not available). ```python -from power_grid_model import power_grid_meta_data +from power_grid_model import ComponentType, DataType, power_grid_meta_data import numpy as np -line_update = np.empty(shape=1, dtype=power_grid_meta_data['update']['line']['dtype']) +line_update = np.empty(shape=1, dtype=power_grid_meta_data[DataType.update][ComponentType.line]['dtype']) +# line_update = np.empty(shape=1, dtype=power_grid_meta_data['update']['line']['dtype']) is supported as well. line_update['id'] = [5] line_update['from_status'] = [0] line_update['to_status'] = [-128] @@ -137,9 +138,10 @@ One can import the `power_grid_meta_data` to get all the predefined `numpy.dtype The code below creates an array which is compatible with transformer input dataset. ```python -from power_grid_model import power_grid_meta_data +from power_grid_model import ComponentType, DataType, power_grid_meta_data -transformer = np.empty(shape=5, dtype=power_grid_meta_data['input']['transformer']['dtype']) +transformer = np.empty(shape=5, dtype=power_grid_meta_data[DataType.input][ComponentType.transformer]['dtype']) +# transformer = np.empty(shape=5, dtype=power_grid_meta_data['input']['transformer']['dtype']) is supported as well. 
 ```
 
 Furthermore, there is an even more convenient function `initialize_array`
diff --git a/scripts/quick_example.py b/scripts/quick_example.py
index acff9294b..9f7da6c69 100644
--- a/scripts/quick_example.py
+++ b/scripts/quick_example.py
@@ -10,7 +10,7 @@
 from power_grid_model import DataType, ComponentType, LoadGenType, PowerGridModel, initialize_array
 
 # node
-node = initialize_array(DataType.input, ComponentType.node, 2)
+node = initialize_array(DataType.input, ComponentType.node, 2) # initialize_array("input", "node", 2) is also OK
 node["id"] = [1, 2]
 node["u_rated"] = [10.5e3, 10.5e3]
 
diff --git a/scripts/quick_example_batch.py b/scripts/quick_example_batch.py
index df7692d42..2568a2ef7 100644
--- a/scripts/quick_example_batch.py
+++ b/scripts/quick_example_batch.py
@@ -17,7 +17,7 @@
 """
 
 # node
-node = initialize_array(DataType.input, ComponentType.node, 3)
+node = initialize_array(DataType.input, ComponentType.node, 3) # initialize_array("input", "node", 3) is also OK
 node["id"] = [1, 2, 7]
 node["u_rated"] = [10.5e3, 10.5e3, 10.5e3]
 

From 84bff9912c83f29e6817f841838d60347df35fd1 Mon Sep 17 00:00:00 2001
From: Santiago Figueroa
Date: Wed, 26 Jun 2024 17:20:41 +0200
Subject: [PATCH 39/47] Fixed formatting.

Signed-off-by: Santiago Figueroa

---
 scripts/quick_example.py       | 7 ++-----
 scripts/quick_example_batch.py | 4 +---
 2 files changed, 3 insertions(+), 8 deletions(-)

diff --git a/scripts/quick_example.py b/scripts/quick_example.py
index 9f7da6c69..491bb1d20 100644
--- a/scripts/quick_example.py
+++ b/scripts/quick_example.py
@@ -2,15 +2,12 @@
 #
 # SPDX-License-Identifier: MPL-2.0
 
-from typing import Dict
-
-import numpy as np
 import pandas as pd
 
-from power_grid_model import DataType, ComponentType, LoadGenType, PowerGridModel, initialize_array
+from power_grid_model import ComponentType, DataType, LoadGenType, PowerGridModel, initialize_array
 
 # node
-node = initialize_array(DataType.input, ComponentType.node, 2) # initialize_array("input", "node", 2) is also OK
+node = initialize_array(DataType.input, ComponentType.node, 2)  # initialize_array("input", "node", 2) is also OK
 node["id"] = [1, 2]
 node["u_rated"] = [10.5e3, 10.5e3]
 
diff --git a/scripts/quick_example_batch.py b/scripts/quick_example_batch.py
index 2568a2ef7..ded4b3a5d 100644
--- a/scripts/quick_example_batch.py
+++ b/scripts/quick_example_batch.py
@@ -2,8 +2,6 @@
 #
 # SPDX-License-Identifier: MPL-2.0
 
-from typing import Dict
-
 import numpy as np
 import pandas as pd
 
@@ -17,7 +15,7 @@
 """
 
 # node
-node = initialize_array(DataType.input, ComponentType.node, 3) # initialize_array("input", "node", 3) is also OK
+node = initialize_array(DataType.input, ComponentType.node, 3)  # initialize_array("input", "node", 3) is also OK
 node["id"] = [1, 2, 7]
 node["u_rated"] = [10.5e3, 10.5e3, 10.5e3]
 

From 199eedf23c6f389122b2111d6ff30c0be7783bec Mon Sep 17 00:00:00 2001
From: Santiago Figueroa
Date: Fri, 28 Jun 2024 13:23:20 +0200
Subject: [PATCH 40/47] Resolved review comments.
Signed-off-by: Santiago Figueroa --- .../core/metadata_enums.py.jinja | 39 ++++++++++++------ .../native-data-interface.md | 14 ++++--- docs/api_reference/python-api-reference.md | 2 +- .../Asymmetric Calculation Example.ipynb | 22 +++++----- docs/examples/Make Test Dataset.ipynb | 26 ++++++------ docs/examples/Power Flow Example.ipynb | 26 ++++++------ docs/examples/Short Circuit Example.ipynb | 12 +++--- docs/examples/State Estimation Example.ipynb | 28 ++++++------- docs/examples/Transformer Examples.ipynb | 40 +++++++++---------- docs/examples/Validation Examples.ipynb | 8 ++-- scripts/quick_example.py | 10 ++--- scripts/quick_example_batch.py | 12 +++--- src/power_grid_model/__init__.py | 2 +- src/power_grid_model/_utils.py | 10 +++-- src/power_grid_model/core/data_handling.py | 12 +++--- .../core/dataset_definitions.py | 37 +++++++++++------ .../core/power_grid_dataset.py | 12 +++--- src/power_grid_model/core/power_grid_meta.py | 17 +++++--- src/power_grid_model/core/power_grid_model.py | 16 ++++---- src/power_grid_model/core/serialization.py | 21 ++++++---- src/power_grid_model/utils.py | 16 ++++---- src/power_grid_model/validation/utils.py | 8 ++-- src/power_grid_model/validation/validation.py | 12 +++--- tests/unit/test_dataset.py | 8 ++-- tests/unit/test_dataset_definitions.py | 6 +-- tests/unit/test_serialization.py | 10 +++-- tests/unit/utils.py | 4 +- .../unit/validation/test_input_validation.py | 36 ++++++++--------- .../validation/test_validation_functions.py | 4 +- 29 files changed, 259 insertions(+), 211 deletions(-) diff --git a/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja index fa2b0d40b..934d40b2e 100644 --- a/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja @@ -15,20 +15,35 @@ from typing import Any, Dict, Mapping # pylint: disable=invalid-name -class DataType(str, Enum): +from enum import Enum + +class DatasetType(str, Enum): """ - A DataType is the type of a :class:`Dataset` in power grid model. + A DatasetType is the type of a :class:`Dataset` in power grid model. - Examples: - - DataType.input = "input" - - DataType.update = "update" + - DatasetType.input = "input" + - DatasetType.update = "update" """ {%- for dataset_type in dataset_types %} {{ dataset_type }} = "{{ dataset_type }}" {%- endfor %} + @classmethod + def contains(cls, member): + """ + Check if member is part of the Enum. + + Args: + member: Member to check. + + Returns: + bool: True if the member is part of the Enum, False otherwise. 
+ """ + return member in cls._member_map_ + class ComponentType(str, Enum): """ @@ -48,25 +63,25 @@ class ComponentType(str, Enum): # pylint: enable=invalid-name -def _str_to_datatype(data_type: Any) -> DataType: - """Helper function to transform data_type str to DataType.""" +def _str_to_datatype(data_type: Any) -> DatasetType: + """Helper function to transform data_type str to DatasetType.""" if isinstance(data_type, str): - return DataType[data_type] + return DatasetType[data_type] return data_type -def _map_to_datatypes(data: Mapping[Any, Any]) -> Dict[DataType, Any]: - """Helper function to map datatype str keys to DataType.""" +def _map_to_datatypes(data: Mapping[Any, Any]) -> Dict[DatasetType, Any]: + """Helper function to map datatype str keys to DatasetType.""" return {_str_to_datatype(key): value for key, value in data.items()} -def _str_to_componenttype(component: Any) -> ComponentType: +def _str_to_component_type(component: Any) -> ComponentType: """Helper function to transform component str to ComponentType.""" if isinstance(component, str): return ComponentType[component] return component -def _map_to_componenttypes(data: Mapping[Any, Any]) -> Dict[ComponentType, Any]: +def _map_to_component_types(data: Mapping[Any, Any]) -> Dict[ComponentType, Any]: """Helper function to map componenttype str keys to ComponentType.""" - return {_str_to_componenttype(key): value for key, value in data.items()} + return {_str_to_component_type(key): value for key, value in data.items()} diff --git a/docs/advanced_documentation/native-data-interface.md b/docs/advanced_documentation/native-data-interface.md index 48d6f60d9..0b17b8056 100644 --- a/docs/advanced_documentation/native-data-interface.md +++ b/docs/advanced_documentation/native-data-interface.md @@ -98,11 +98,12 @@ For example, the following update dataset will set `from_status` of the line #5 and keep the `to_status` as unchanged (change not available). ```python -from power_grid_model import ComponentType, DataType, power_grid_meta_data +from power_grid_model import ComponentType, DatasetType, power_grid_meta_data import numpy as np -line_update = np.empty(shape=1, dtype=power_grid_meta_data[DataType.update][ComponentType.line]['dtype']) -# line_update = np.empty(shape=1, dtype=power_grid_meta_data['update']['line']['dtype']) is supported as well. +line_update = np.empty(shape=1, dtype=power_grid_meta_data[DatasetType.update][ComponentType.line]['dtype']) +# direct string access is supported as well: +# line_update = np.empty(shape=1, dtype=power_grid_meta_data['update']['line']['dtype']) line_update['id'] = [5] line_update['from_status'] = [0] line_update['to_status'] = [-128] @@ -138,10 +139,11 @@ One can import the `power_grid_meta_data` to get all the predefined `numpy.dtype The code below creates an array which is compatible with transformer input dataset. ```python -from power_grid_model import ComponentType, DataType, power_grid_meta_data +from power_grid_model import ComponentType, DatasetType, power_grid_meta_data -transformer = np.empty(shape=5, dtype=power_grid_meta_data[DataType.input][ComponentType.transformer]['dtype']) -# transformer = np.empty(shape=5, dtype=power_grid_meta_data['input']['transformer']['dtype']) is supported as well. 
+transformer = np.empty(shape=5, dtype=power_grid_meta_data[DatasetType.input][ComponentType.transformer]['dtype']) +# direct string access is supported as well: +# transformer = np.empty(shape=5, dtype=power_grid_meta_data['input']['transformer']['dtype']) ``` Furthermore, there is an even more convenient function `initialize_array` diff --git a/docs/api_reference/python-api-reference.md b/docs/api_reference/python-api-reference.md index 7eae96fef..2e7b9c6ef 100644 --- a/docs/api_reference/python-api-reference.md +++ b/docs/api_reference/python-api-reference.md @@ -32,7 +32,7 @@ SPDX-License-Identifier: MPL-2.0 .. autoclass:: power_grid_model.data_types.BatchArray .. autoclass:: power_grid_model.data_types.DenseBatchArray .. autoclass:: power_grid_model.data_types.SparseBatchArray -.. autoclass:: power_grid_model.dataset_definitions.DataType +.. autoclass:: power_grid_model.dataset_definitions.DatasetType .. autoclass:: power_grid_model.dataset_definitions.ComponentType ``` diff --git a/docs/examples/Asymmetric Calculation Example.ipynb b/docs/examples/Asymmetric Calculation Example.ipynb index 26183217f..f349706fb 100644 --- a/docs/examples/Asymmetric Calculation Example.ipynb +++ b/docs/examples/Asymmetric Calculation Example.ipynb @@ -52,7 +52,7 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType, ComponentType, DataType\n", + "from power_grid_model import LoadGenType, ComponentType, DatasetType\n", "from power_grid_model import PowerGridModel, CalculationMethod, CalculationType, MeasuredTerminalType\n", "from power_grid_model import initialize_array" ] @@ -83,12 +83,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(DataType.input, ComponentType.node, 3)\n", + "node = initialize_array(DatasetType.input, ComponentType.node, 3)\n", "node[\"id\"] = np.array([1, 2, 6])\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(DataType.input, ComponentType.line, 3)\n", + "line = initialize_array(DatasetType.input, ComponentType.line, 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -105,7 +105,7 @@ "line[\"tan0\"] = [0, 0, 0]\n", "\n", "# sym load\n", - "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 1)\n", + "sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 1)\n", "sym_load[\"id\"] = [4]\n", "sym_load[\"node\"] = [2]\n", "sym_load[\"status\"] = [1]\n", @@ -114,7 +114,7 @@ "sym_load[\"q_specified\"] = [5e6]\n", "\n", "# asym load\n", - "asym_load = initialize_array(DataType.input, ComponentType.asym_load, 1)\n", + "asym_load = initialize_array(DatasetType.input, ComponentType.asym_load, 1)\n", "asym_load[\"id\"] = [7]\n", "asym_load[\"node\"] = [6]\n", "asym_load[\"status\"] = [1]\n", @@ -123,7 +123,7 @@ "asym_load[\"q_specified\"] = [[0, 8e6, 2e6]] # the 3 phases may have different loads\n", "\n", "# source\n", - "source = initialize_array(DataType.input, ComponentType.source, 1)\n", + "source = initialize_array(DatasetType.input, ComponentType.source, 1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", @@ -269,7 +269,7 @@ "outputs": [], "source": [ "# note the shape of the array, 10 scenarios, 1 objects (asymmetric load_7)\n", - "load_profile = initialize_array(DataType.update, ComponentType.asym_load, (10, 1)) \n", + "load_profile = initialize_array(DatasetType.update, 
ComponentType.asym_load, (10, 1)) \n", "\n", "# this is a scale of asym_load from 0% to 100%------------------\n", "# the array is an (10, 1, 3) shape, which shows (scenario, object, phase).\n", @@ -541,14 +541,14 @@ "outputs": [], "source": [ "# sym voltage sensor\n", - "sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 2)\n", + "sym_voltage_sensor = initialize_array(DatasetType.input, ComponentType.sym_voltage_sensor, 2)\n", "sym_voltage_sensor[\"id\"] = [11, 12]\n", "sym_voltage_sensor[\"measured_object\"] = [1, 2]\n", "sym_voltage_sensor[\"u_sigma\"] = [100, 10]\n", "sym_voltage_sensor[\"u_measured\"] = [6000, 5500]\n", "\n", "# asym voltage sensor\n", - "asym_voltage_sensor = initialize_array(DataType.input, ComponentType.asym_voltage_sensor, 1)\n", + "asym_voltage_sensor = initialize_array(DatasetType.input, ComponentType.asym_voltage_sensor, 1)\n", "asym_voltage_sensor[\"id\"] = [13]\n", "asym_voltage_sensor[\"measured_object\"] = [6]\n", "asym_voltage_sensor[\"u_sigma\"] = [100]\n", @@ -556,7 +556,7 @@ "\n", "\n", "# sym power sensor\n", - "sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 7)\n", + "sym_power_sensor = initialize_array(DatasetType.input, ComponentType.sym_power_sensor, 7)\n", "sym_power_sensor[\"id\"] = [14, 15, 16, 17, 18, 19, 20]\n", "sym_power_sensor[\"measured_object\"] = [3, 3, 5, 5, 8, 8, 4]\n", "sym_power_sensor[\"measured_terminal_type\"] = [\n", @@ -570,7 +570,7 @@ "sym_power_sensor[\"q_measured\"] = [5e6, -7e6, 2e6, -2e6, 5e6, -5e6, 5e6]\n", "\n", "# asym power sensor\n", - "asym_power_sensor = initialize_array(DataType.input, ComponentType.asym_power_sensor, 1)\n", + "asym_power_sensor = initialize_array(DatasetType.input, ComponentType.asym_power_sensor, 1)\n", "asym_power_sensor[\"id\"] = [21]\n", "asym_power_sensor[\"measured_object\"] = [6]\n", "asym_power_sensor[\"measured_terminal_type\"] = [MeasuredTerminalType.node]\n", diff --git a/docs/examples/Make Test Dataset.ipynb b/docs/examples/Make Test Dataset.ipynb index a22c41ee3..512e537a9 100644 --- a/docs/examples/Make Test Dataset.ipynb +++ b/docs/examples/Make Test Dataset.ipynb @@ -153,7 +153,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "id": "b158e92f", "metadata": {}, "outputs": [], @@ -167,19 +167,19 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType, ComponentType, DataType\n", + "from power_grid_model import LoadGenType, ComponentType, DatasetType\n", "from power_grid_model import PowerGridModel\n", "from power_grid_model import initialize_array\n", "\n", "# network\n", "\n", "# node\n", - "node = initialize_array(DataType.input, ComponentType.node, 3)\n", + "node = initialize_array(DatasetType.input, ComponentType.node, 3)\n", "node[\"id\"] = [1, 2, 6]\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(DataType.input, \"line\", 3)\n", + "line = initialize_array(DatasetType.input, ComponentType.line, 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -192,7 +192,7 @@ "line[\"i_n\"] = [1000, 1000, 1000]\n", "\n", "# load\n", - "sym_load = initialize_array(DataType.input, \"sym_load\", 2)\n", + "sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 2)\n", "sym_load[\"id\"] = [4, 7]\n", "sym_load[\"node\"] = [2, 6]\n", "sym_load[\"status\"] = [1, 1]\n", @@ -201,14 +201,14 
@@ "sym_load[\"q_specified\"] = [5e6, 2e6]\n", "\n", "# source\n", - "source = initialize_array(DataType.input, \"source\", 1)\n", + "source = initialize_array(DatasetType.input, ComponentType.source, 1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# all\n", - "input_data = {ComponentType.node: node, \"line\": line, \"sym_load\": sym_load, \"source\": source}" + "input_data = {ComponentType.node: node, ComponentType.line: line, ComponentType.sym_load: sym_load, ComponentType.source: source}" ] }, { @@ -223,7 +223,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "id": "724e098a", "metadata": {}, "outputs": [], @@ -238,7 +238,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "id": "071c790a", "metadata": {}, "outputs": [ @@ -340,13 +340,13 @@ "source": [ "# create batch set\n", "\n", - "load_profile = initialize_array(DataType.update, \"sym_load\", (3, 2))\n", + "load_profile = initialize_array(DatasetType.update, ComponentType.sym_load, (3, 2))\n", "load_profile[\"id\"] = [[4, 7]]\n", "# this is a scale of load from 0% to 100%\n", "load_profile[\"p_specified\"] = [[30e6, 15e6]] * np.linspace(0, 1, 3).reshape(-1, 1)\n", "\n", "\n", - "time_series_mutation = {\"sym_load\": load_profile}" + "time_series_mutation = {ComponentType.sym_load: load_profile}" ] }, { @@ -420,7 +420,7 @@ "\n", "batch_result = pgm.calculate_power_flow(update_data=imported_batch_update)\n", "\n", - "print(batch_result[\"sym_load\"][\"p\"])" + "print(batch_result[ComponentType.sym_load][\"p\"])" ] } ], @@ -440,7 +440,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.10.14" }, "vscode": { "interpreter": { diff --git a/docs/examples/Power Flow Example.ipynb b/docs/examples/Power Flow Example.ipynb index 3764b296a..aa37f1cb3 100644 --- a/docs/examples/Power Flow Example.ipynb +++ b/docs/examples/Power Flow Example.ipynb @@ -51,7 +51,7 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType, ComponentType, DataType\n", + "from power_grid_model import LoadGenType, ComponentType, DatasetType\n", "from power_grid_model import PowerGridModel, CalculationMethod, CalculationType\n", "from power_grid_model import initialize_array" ] @@ -80,12 +80,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(DataType.input, ComponentType.node, 3)\n", + "node = initialize_array(DatasetType.input, ComponentType.node, 3)\n", "node[\"id\"] = np.array([1, 2, 6])\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(DataType.input, ComponentType.line, 3)\n", + "line = initialize_array(DatasetType.input, ComponentType.line, 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -98,7 +98,7 @@ "line[\"i_n\"] = [1000, 1000, 1000]\n", "\n", "# load\n", - "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 2)\n", + "sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 2)\n", "sym_load[\"id\"] = [4, 7]\n", "sym_load[\"node\"] = [2, 6]\n", "sym_load[\"status\"] = [1, 1]\n", @@ -107,7 +107,7 @@ "sym_load[\"q_specified\"] = [5e6, 2e6]\n", "\n", "# source\n", - "source = initialize_array(DataType.input, ComponentType.source, 1)\n", + "source = initialize_array(DatasetType.input, ComponentType.source, 
1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", @@ -440,12 +440,12 @@ "metadata": {}, "outputs": [], "source": [ - "update_sym_load = initialize_array(DataType.update, ComponentType.sym_load, 2)\n", + "update_sym_load = initialize_array(DatasetType.update, ComponentType.sym_load, 2)\n", "update_sym_load[\"id\"] = [4, 7] # same ID\n", "update_sym_load[\"p_specified\"] = [30e6, 15e6] # change active power\n", "# leave reactive power the same, no need to specify\n", "\n", - "update_line = initialize_array(DataType.update, ComponentType.line, 1)\n", + "update_line = initialize_array(DatasetType.update, ComponentType.line, 1)\n", "update_line[\"id\"] = [3] # change line ID 3\n", "update_line[\"from_status\"] = [0] # switch off at from side\n", "# leave to-side swiching status the same, no need to specify\n", @@ -608,7 +608,7 @@ "outputs": [], "source": [ "load_profile = initialize_array(\n", - " DataType.update, ComponentType.sym_load, (10, 2)\n", + " DatasetType.update, ComponentType.sym_load, (10, 2)\n", ") # note the shape of the array, 10 scenarios, 2 objects (loads)\n", "# below is an assignment of shape (1, 2) array to shape (10, 2) array\n", "# the numpy broadcasting rule ensures that the same object ids are repeated 10 times\n", @@ -762,7 +762,7 @@ "metadata": {}, "outputs": [], "source": [ - "line_profile = initialize_array(DataType.update, ComponentType.line, (3, 3)) # 3 scenarios, 3 objects (lines)\n", + "line_profile = initialize_array(DatasetType.update, ComponentType.line, (3, 3)) # 3 scenarios, 3 objects (lines)\n", "# below the same broadcasting trick\n", "line_profile[\"id\"] = [[3, 5, 8]]\n", "# fully specify the status of all lines, even it is the same as the base scenario\n", @@ -830,7 +830,7 @@ "metadata": {}, "outputs": [], "source": [ - "line_profile = initialize_array(DataType.update, ComponentType.line, (3, 1)) # 3 scenarios, 1 object mutation per scenario\n", + "line_profile = initialize_array(DatasetType.update, ComponentType.line, (3, 1)) # 3 scenarios, 1 object mutation per scenario\n", "# for each mutation, only one object is specified\n", "line_profile[\"id\"] = [[3], [5], [8]]\n", "# specify only the changed status (switch off) of one line\n", @@ -957,11 +957,11 @@ "from power_grid_model.errors import PowerGridError, ConflictVoltage\n", "\n", "# node\n", - "node_error = initialize_array(DataType.input, ComponentType.node, 2)\n", + "node_error = initialize_array(DatasetType.input, ComponentType.node, 2)\n", "node_error[\"id\"] = [1, 2]\n", "node_error[\"u_rated\"] = [10.5e3, 150.0e3] # different rated voltages\n", "# line\n", - "line_error = initialize_array(DataType.input, ComponentType.line, 1)\n", + "line_error = initialize_array(DatasetType.input, ComponentType.line, 1)\n", "line_error[\"id\"] = [3]\n", "line_error[\"from_node\"] = [1]\n", "line_error[\"to_node\"] = [2]\n", @@ -1017,7 +1017,7 @@ "source": [ "from power_grid_model.errors import IDNotFound\n", "\n", - "line_update_error = initialize_array(DataType.update, ComponentType.line, 1)\n", + "line_update_error = initialize_array(DatasetType.update, ComponentType.line, 1)\n", "line_update_error[\"id\"] = [12345] # non-existing\n", "line_update_error[\"from_status\"] = [1]\n", "\n", diff --git a/docs/examples/Short Circuit Example.ipynb b/docs/examples/Short Circuit Example.ipynb index 05376916b..3fe6ca06d 100644 --- a/docs/examples/Short Circuit Example.ipynb +++ b/docs/examples/Short Circuit Example.ipynb @@ -54,7 +54,7 @@ " # suppress warning about pyarrow as 
future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType, ComponentType, DataType\n", + "from power_grid_model import LoadGenType, ComponentType, DatasetType\n", "from power_grid_model import PowerGridModel, CalculationMethod, CalculationType, FaultType, FaultPhase, ShortCircuitVoltageScaling\n", "from power_grid_model import initialize_array" ] @@ -82,12 +82,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(DataType.input, ComponentType.node, 3)\n", + "node = initialize_array(DatasetType.input, ComponentType.node, 3)\n", "node[\"id\"] = np.array([1, 2, 6])\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(DataType.input, ComponentType.line, 3)\n", + "line = initialize_array(DatasetType.input, ComponentType.line, 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -100,7 +100,7 @@ "line[\"i_n\"] = [1000, 1000, 1000]\n", "\n", "# load\n", - "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 2)\n", + "sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 2)\n", "sym_load[\"id\"] = [4, 7]\n", "sym_load[\"node\"] = [2, 6]\n", "sym_load[\"status\"] = [1, 1]\n", @@ -109,14 +109,14 @@ "sym_load[\"q_specified\"] = [5e6, 2e6]\n", "\n", "# source\n", - "source = initialize_array(DataType.input, ComponentType.source, 1)\n", + "source = initialize_array(DatasetType.input, ComponentType.source, 1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# fault\n", - "fault = initialize_array(DataType.input, ComponentType.fault, 1)\n", + "fault = initialize_array(DatasetType.input, ComponentType.fault, 1)\n", "fault[\"id\"] = [11]\n", "fault[\"status\"] = [1]\n", "fault[\"fault_object\"] = [6]\n", diff --git a/docs/examples/State Estimation Example.ipynb b/docs/examples/State Estimation Example.ipynb index edcf353de..9366d6f43 100644 --- a/docs/examples/State Estimation Example.ipynb +++ b/docs/examples/State Estimation Example.ipynb @@ -49,7 +49,7 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType, DataType, ComponentType\n", + "from power_grid_model import LoadGenType, DatasetType, ComponentType\n", "from power_grid_model import PowerGridModel, CalculationMethod, CalculationType, MeasuredTerminalType\n", "from power_grid_model import initialize_array" ] @@ -77,12 +77,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(DataType.input, ComponentType.node, 3)\n", + "node = initialize_array(DatasetType.input, ComponentType.node, 3)\n", "node[\"id\"] = [1, 2, 6]\n", "node[\"u_rated\"] = [10.5e3, 10.5e3, 10.5e3]\n", "\n", "# line\n", - "line = initialize_array(DataType.input, ComponentType.line, 3)\n", + "line = initialize_array(DatasetType.input, ComponentType.line, 3)\n", "line[\"id\"] = [3, 5, 8]\n", "line[\"from_node\"] = [1, 2, 1]\n", "line[\"to_node\"] = [2, 6, 6]\n", @@ -95,7 +95,7 @@ "line[\"i_n\"] = [1000, 1000, 1000]\n", "\n", "# load\n", - "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 2)\n", + "sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 2)\n", "sym_load[\"id\"] = [4, 7]\n", "sym_load[\"node\"] = [2, 6]\n", "sym_load[\"status\"] = [1, 1]\n", @@ -104,21 +104,21 @@ "sym_load[\"q_specified\"] = [5e6, 2e6]\n", "\n", "# source\n", - "source = 
initialize_array(DataType.input, ComponentType.source, 1)\n", + "source = initialize_array(DatasetType.input, ComponentType.source, 1)\n", "source[\"id\"] = [10]\n", "source[\"node\"] = [1]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# voltage sensor\n", - "sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 3)\n", + "sym_voltage_sensor = initialize_array(DatasetType.input, ComponentType.sym_voltage_sensor, 3)\n", "sym_voltage_sensor[\"id\"] = [11, 12, 13]\n", "sym_voltage_sensor[\"measured_object\"] = [1, 2, 6]\n", "sym_voltage_sensor[\"u_sigma\"] = [1.0, 1.0, 1.0]\n", "sym_voltage_sensor[\"u_measured\"] = [10489.37, 9997.32, 10102.01]\n", "\n", "# power sensor\n", - "sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 8)\n", + "sym_power_sensor = initialize_array(DatasetType.input, ComponentType.sym_power_sensor, 8)\n", "sym_power_sensor[\"id\"] = [14, 15, 16, 17, 18, 19, 20, 21]\n", "sym_power_sensor[\"measured_object\"] = [3, 3, 5, 5, 8, 8, 4, 6]\n", "sym_power_sensor[\"measured_terminal_type\"] = [\n", @@ -728,7 +728,7 @@ ], "source": [ "# power sensor\n", - "sym_power_sensor2 = initialize_array(DataType.input, ComponentType.sym_power_sensor, 8)\n", + "sym_power_sensor2 = initialize_array(DatasetType.input, ComponentType.sym_power_sensor, 8)\n", "sym_power_sensor2[\"id\"] = [14, 15, 16, 17, 18, 19, 20, 21]\n", "sym_power_sensor2[\"measured_object\"] = [3, 3, 5, 5, 8, 8, 4, 6]\n", "sym_power_sensor2[\"measured_terminal_type\"] = [\n", @@ -883,12 +883,12 @@ } ], "source": [ - "sym_voltage_sensor_update = initialize_array(DataType.update, ComponentType.sym_voltage_sensor, 1)\n", + "sym_voltage_sensor_update = initialize_array(DatasetType.update, ComponentType.sym_voltage_sensor, 1)\n", "# for each mutation, only one object is specified\n", "sym_voltage_sensor_update[\"id\"] = 13\n", "sym_voltage_sensor_update[\"u_sigma\"] = np.inf # disable this sensor\n", "\n", - "sym_power_sensor_update = initialize_array(DataType.update, ComponentType.sym_power_sensor, 1)\n", + "sym_power_sensor_update = initialize_array(DatasetType.update, ComponentType.sym_power_sensor, 1)\n", "sym_power_sensor_update[\"id\"] = 21\n", "sym_power_sensor_update[\"power_sigma\"] = np.inf # disable this sensor\n", "\n", @@ -933,11 +933,11 @@ "source": [ "from power_grid_model.errors import PowerGridError\n", "\n", - "sym_voltage_sensor_update = initialize_array(DataType.update, ComponentType.sym_voltage_sensor, 3)\n", + "sym_voltage_sensor_update = initialize_array(DatasetType.update, ComponentType.sym_voltage_sensor, 3)\n", "sym_voltage_sensor_update[\"id\"] = sym_voltage_sensor[\"id\"]\n", "sym_voltage_sensor_update[\"u_sigma\"] = np.inf # disable all sensors\n", "\n", - "sym_power_sensor_update = initialize_array(DataType.update, ComponentType.sym_power_sensor, 8)\n", + "sym_power_sensor_update = initialize_array(DatasetType.update, ComponentType.sym_power_sensor, 8)\n", "sym_power_sensor_update[\"id\"] = sym_power_sensor[\"id\"]\n", "sym_power_sensor_update[\"power_sigma\"] = np.inf # disable all sensors\n", "\n", @@ -984,7 +984,7 @@ "outputs": [], "source": [ "sym_voltage_sensor_update = initialize_array(\n", - " DataType.update, ComponentType.sym_voltage_sensor, (4, 3)\n", + " DatasetType.update, ComponentType.sym_voltage_sensor, (4, 3)\n", ") # 4 scenarios, 3 objects per scenario\n", "# for each mutation, only one object is specified\n", "sym_voltage_sensor_update[\"id\"] = [[11, 12, 13]] * 4\n", @@ -996,7 +996,7 @@ 
"]\n", "sym_voltage_sensor_update[\"u_sigma\"][2, 2] = np.inf # disable the third sensor of the third scenario\n", "\n", - "sym_power_sensor_update = initialize_array(DataType.update, ComponentType.sym_power_sensor, (4, 1))\n", + "sym_power_sensor_update = initialize_array(DatasetType.update, ComponentType.sym_power_sensor, (4, 1))\n", "sym_power_sensor_update[\"id\"] = [21]\n", "sym_power_sensor_update[\"power_sigma\"] = [\n", " [1.0e3],\n", diff --git a/docs/examples/Transformer Examples.ipynb b/docs/examples/Transformer Examples.ipynb index 3fb82b7b0..e2faaa244 100644 --- a/docs/examples/Transformer Examples.ipynb +++ b/docs/examples/Transformer Examples.ipynb @@ -44,7 +44,7 @@ " # suppress warning about pyarrow as future required dependency\n", " import pandas as pd\n", "\n", - "from power_grid_model import LoadGenType, DataType, ComponentType\n", + "from power_grid_model import LoadGenType, DatasetType, ComponentType\n", "from power_grid_model import (\n", " PowerGridModel,\n", " CalculationMethod,\n", @@ -77,12 +77,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(DataType.input, ComponentType.node, 2)\n", + "node = initialize_array(DatasetType.input, ComponentType.node, 2)\n", "node[\"id\"] = np.array([2, 4])\n", "node[\"u_rated\"] = [1e4, 4e2]\n", "\n", "# load\n", - "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 1)\n", + "sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 1)\n", "sym_load[\"id\"] = [5]\n", "sym_load[\"node\"] = [4]\n", "sym_load[\"status\"] = [1]\n", @@ -91,14 +91,14 @@ "sym_load[\"q_specified\"] = [5e3]\n", "\n", "# source\n", - "source = initialize_array(DataType.input, ComponentType.source, 1)\n", + "source = initialize_array(DatasetType.input, ComponentType.source, 1)\n", "source[\"id\"] = [1]\n", "source[\"node\"] = [2]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# transformer\n", - "transformer = initialize_array(DataType.input, ComponentType.transformer, 1)\n", + "transformer = initialize_array(DatasetType.input, ComponentType.transformer, 1)\n", "transformer[\"id\"] = [3]\n", "transformer[\"from_node\"] = [2]\n", "transformer[\"to_node\"] = [4]\n", @@ -248,14 +248,14 @@ ], "source": [ "# voltage sensor\n", - "sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 2)\n", + "sym_voltage_sensor = initialize_array(DatasetType.input, ComponentType.sym_voltage_sensor, 2)\n", "sym_voltage_sensor[\"id\"] = [6, 7]\n", "sym_voltage_sensor[\"measured_object\"] = [2, 4]\n", "sym_voltage_sensor[\"u_sigma\"] = [1.0, 1.0]\n", "sym_voltage_sensor[\"u_measured\"] = [1e5, 4e2]\n", "\n", "# power sensor\n", - "sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 2)\n", + "sym_power_sensor = initialize_array(DatasetType.input, ComponentType.sym_power_sensor, 2)\n", "sym_power_sensor[\"id\"] = [8, 9]\n", "sym_power_sensor[\"measured_object\"] = [3, 3]\n", "sym_power_sensor[\"measured_terminal_type\"] = [\n", @@ -336,12 +336,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(DataType.input, ComponentType.node, 3)\n", + "node = initialize_array(DatasetType.input, ComponentType.node, 3)\n", "node[\"id\"] = np.array([2, 4, 6])\n", "node[\"u_rated\"] = [1e4, 1e2, 1e2]\n", "\n", "# load\n", - "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 2)\n", + "sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 2)\n", "sym_load[\"id\"] = [5, 7]\n", "sym_load[\"node\"] = [4, 6]\n", 
"sym_load[\"status\"] = [1]\n", @@ -350,14 +350,14 @@ "sym_load[\"q_specified\"] = [5e3, 5e3]\n", "\n", "# source\n", - "source = initialize_array(DataType.input, ComponentType.source, 1)\n", + "source = initialize_array(DatasetType.input, ComponentType.source, 1)\n", "source[\"id\"] = [1]\n", "source[\"node\"] = [2]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# three-winding transformer\n", - "three_winding_transformer = initialize_array(DataType.input, ComponentType.three_winding_transformer, 1)\n", + "three_winding_transformer = initialize_array(DatasetType.input, ComponentType.three_winding_transformer, 1)\n", "three_winding_transformer[\"id\"] = [3]\n", "three_winding_transformer[\"node_1\"] = [2]\n", "three_winding_transformer[\"node_2\"] = [4]\n", @@ -488,14 +488,14 @@ ], "source": [ "# voltage sensor\n", - "sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 3)\n", + "sym_voltage_sensor = initialize_array(DatasetType.input, ComponentType.sym_voltage_sensor, 3)\n", "sym_voltage_sensor[\"id\"] = [8, 9, 10]\n", "sym_voltage_sensor[\"measured_object\"] = [2, 4, 6]\n", "sym_voltage_sensor[\"u_sigma\"] = [1.0, 1.0, 1.0]\n", "sym_voltage_sensor[\"u_measured\"] = [1e4, 1e2, 1e2]\n", "\n", "# power sensor\n", - "sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 3)\n", + "sym_power_sensor = initialize_array(DatasetType.input, ComponentType.sym_power_sensor, 3)\n", "sym_power_sensor[\"id\"] = [11, 12, 13]\n", "sym_power_sensor[\"measured_object\"] = [3, 3, 3]\n", "sym_power_sensor[\"measured_terminal_type\"] = [\n", @@ -573,12 +573,12 @@ "outputs": [], "source": [ "# node\n", - "node = initialize_array(DataType.input, ComponentType.node, 3)\n", + "node = initialize_array(DatasetType.input, ComponentType.node, 3)\n", "node[\"id\"] = [2, 4, 6]\n", "node[\"u_rated\"] = [1e4, 4e2, 4e2]\n", "\n", "# load\n", - "sym_load = initialize_array(DataType.input, ComponentType.sym_load, 1)\n", + "sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 1)\n", "sym_load[\"id\"] = [7]\n", "sym_load[\"node\"] = [6]\n", "sym_load[\"status\"] = [1]\n", @@ -587,14 +587,14 @@ "sym_load[\"q_specified\"] = [5e3]\n", "\n", "# source\n", - "source = initialize_array(DataType.input, ComponentType.source, 1)\n", + "source = initialize_array(DatasetType.input, ComponentType.source, 1)\n", "source[\"id\"] = [1]\n", "source[\"node\"] = [2]\n", "source[\"status\"] = [1]\n", "source[\"u_ref\"] = [1.0]\n", "\n", "# line\n", - "line = initialize_array(DataType.input, ComponentType.line, 1)\n", + "line = initialize_array(DatasetType.input, ComponentType.line, 1)\n", "line[\"id\"] = [5]\n", "line[\"from_node\"] = [4]\n", "line[\"to_node\"] = [6]\n", @@ -606,7 +606,7 @@ "line[\"tan1\"] = [0.0]\n", "\n", "# transformer\n", - "transformer = initialize_array(DataType.input, ComponentType.transformer, 1)\n", + "transformer = initialize_array(DatasetType.input, ComponentType.transformer, 1)\n", "transformer[\"id\"] = [3]\n", "transformer[\"from_node\"] = [2]\n", "transformer[\"to_node\"] = [4]\n", @@ -629,7 +629,7 @@ "transformer[\"tap_size\"] = [100]\n", "\n", "# transformer tap regulator\n", - "transformer_tap_regulator = initialize_array(DataType.input, ComponentType.transformer_tap_regulator, 1)\n", + "transformer_tap_regulator = initialize_array(DatasetType.input, ComponentType.transformer_tap_regulator, 1)\n", "transformer_tap_regulator[\"id\"] = [8]\n", "transformer_tap_regulator[\"regulated_object\"] = [3]\n", 
"transformer_tap_regulator[\"status\"] = [1]\n", @@ -1083,7 +1083,7 @@ } ], "source": [ - "transformer_update = initialize_array(DataType.update, ComponentType.transformer, (2, 1))\n", + "transformer_update = initialize_array(DatasetType.update, ComponentType.transformer, (2, 1))\n", "transformer_update[\"id\"] = 3\n", "transformer_update[\"tap_pos\"] = [[0], [1]]\n", "\n", diff --git a/docs/examples/Validation Examples.ipynb b/docs/examples/Validation Examples.ipynb index 9c6a8c3c8..cf3d4c0d7 100644 --- a/docs/examples/Validation Examples.ipynb +++ b/docs/examples/Validation Examples.ipynb @@ -62,17 +62,17 @@ "metadata": {}, "outputs": [], "source": [ - "from power_grid_model import PowerGridModel, DataType, ComponentType, initialize_array\n", + "from power_grid_model import PowerGridModel, DatasetType, ComponentType, initialize_array\n", "\n", "# A power grid containing several errors\n", "\n", "# node\n", - "node_error = initialize_array(DataType.input, ComponentType.node, 3)\n", + "node_error = initialize_array(DatasetType.input, ComponentType.node, 3)\n", "node_error[\"id\"] = [1, 2, 3]\n", "node_error[\"u_rated\"] = [10.5e3]\n", "\n", "# line\n", - "line_error = initialize_array(DataType.input, ComponentType.line, 3)\n", + "line_error = initialize_array(DatasetType.input, ComponentType.line, 3)\n", "line_error[\"id\"] = [4, 5, 6]\n", "line_error[\"from_node\"] = [1, 2, 3]\n", "line_error[\"to_node\"] = [2, 3, 4]\n", @@ -84,7 +84,7 @@ "line_error[\"tan1\"] = [0.0]\n", "\n", "# Power Sensor\n", - "sensor_error = initialize_array(DataType.input, ComponentType.sym_power_sensor, 2)\n", + "sensor_error = initialize_array(DatasetType.input, ComponentType.sym_power_sensor, 2)\n", "sensor_error[\"id\"] = [6, 7]\n", "sensor_error[\"measured_object\"] = [3, 4]\n", "sensor_error[\"measured_terminal_type\"] = [0, 2]\n", diff --git a/scripts/quick_example.py b/scripts/quick_example.py index 491bb1d20..2588b2aeb 100644 --- a/scripts/quick_example.py +++ b/scripts/quick_example.py @@ -4,15 +4,15 @@ import pandas as pd -from power_grid_model import ComponentType, DataType, LoadGenType, PowerGridModel, initialize_array +from power_grid_model import ComponentType, DatasetType, LoadGenType, PowerGridModel, initialize_array # node -node = initialize_array(DataType.input, ComponentType.node, 2) # initialize_array("input", "node", 3) is also OK +node = initialize_array(DatasetType.input, ComponentType.node, 2) # initialize_array("input", "node", 3) is also OK node["id"] = [1, 2] node["u_rated"] = [10.5e3, 10.5e3] # line -line = initialize_array(DataType.input, ComponentType.line, 1) +line = initialize_array(DatasetType.input, ComponentType.line, 1) line["id"] = [3] line["from_node"] = [1] line["to_node"] = [2] @@ -25,7 +25,7 @@ line["i_n"] = [1000] # load -sym_load = initialize_array(DataType.input, ComponentType.sym_load, 1) +sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 1) sym_load["id"] = [4] sym_load["node"] = [2] sym_load["status"] = [1] @@ -34,7 +34,7 @@ sym_load["q_specified"] = [0.5e6] # source -source = initialize_array(DataType.input, ComponentType.source, 1) +source = initialize_array(DatasetType.input, ComponentType.source, 1) source["id"] = [5] source["node"] = [1] source["status"] = [1] diff --git a/scripts/quick_example_batch.py b/scripts/quick_example_batch.py index ded4b3a5d..13c0b25bb 100644 --- a/scripts/quick_example_batch.py +++ b/scripts/quick_example_batch.py @@ -5,7 +5,7 @@ import numpy as np import pandas as pd -from power_grid_model import ComponentType, 
DataType, LoadGenType, PowerGridModel, initialize_array +from power_grid_model import ComponentType, DatasetType, LoadGenType, PowerGridModel, initialize_array """ node_1 ---line_3--- node_2 ---line_6--- node_7 @@ -15,12 +15,12 @@ """ # node -node = initialize_array(DataType.input, ComponentType.node, 3) # initialize_array("input", "node", 3) is also OK +node = initialize_array(DatasetType.input, ComponentType.node, 3) # initialize_array("input", "node", 3) is also OK node["id"] = [1, 2, 7] node["u_rated"] = [10.5e3, 10.5e3, 10.5e3] # line -line = initialize_array(DataType.input, ComponentType.line, 2) +line = initialize_array(DatasetType.input, ComponentType.line, 2) line["id"] = [3, 6] line["from_node"] = [1, 2] line["to_node"] = [2, 7] @@ -36,7 +36,7 @@ line["tan0"] = [0.0, 0.0] # zero sequence parameters # load -asym_load = initialize_array(DataType.input, ComponentType.asym_load, 2) +asym_load = initialize_array(DatasetType.input, ComponentType.asym_load, 2) asym_load["id"] = [4, 8] asym_load["node"] = [2, 7] asym_load["status"] = [1, 1] @@ -47,7 +47,7 @@ asym_load["q_specified"] = [[0.5e6, 0.0, 0.0], [0.0, 0.2e6, 0.0]] # input for three phase per entry # source -source = initialize_array(DataType.input, ComponentType.source, 1) +source = initialize_array(DatasetType.input, ComponentType.source, 1) source["id"] = [5] source["node"] = [1] source["status"] = [1] @@ -75,7 +75,7 @@ # batch calculation scaler = np.linspace(0, 1, 1000) batch_p = asym_load["p_specified"].reshape(1, 2, 3) * scaler.reshape(-1, 1, 1) -batch_load = initialize_array(DataType.update, ComponentType.asym_load, (1000, 2)) +batch_load = initialize_array(DatasetType.update, ComponentType.asym_load, (1000, 2)) batch_load["id"] = [[4, 8]] batch_load["p_specified"] = batch_p batch_update = {ComponentType.asym_load: batch_load} diff --git a/src/power_grid_model/__init__.py b/src/power_grid_model/__init__.py index 54277c167..605f0deeb 100644 --- a/src/power_grid_model/__init__.py +++ b/src/power_grid_model/__init__.py @@ -4,7 +4,7 @@ """Power Grid Model""" -from power_grid_model.core.dataset_definitions import ComponentType, DataType +from power_grid_model.core.dataset_definitions import ComponentType, DatasetType from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data from power_grid_model.core.power_grid_model import PowerGridModel from power_grid_model.enum import ( diff --git a/src/power_grid_model/_utils.py b/src/power_grid_model/_utils.py index 4d808a7da..2d4e21607 100644 --- a/src/power_grid_model/_utils.py +++ b/src/power_grid_model/_utils.py @@ -10,17 +10,19 @@ We do not officially support this functionality and may remove features in this library at any given time! 
""" -from typing import List, Optional, cast +from typing import List, Optional, Union, cast import numpy as np -from power_grid_model import ComponentType +from power_grid_model.core.dataset_definitions import ComponentType from power_grid_model.data_types import ( BatchArray, BatchDataset, BatchList, Dataset, + DenseBatchArray, PythonDataset, + SingleArray, SingleDataset, SinglePythonDataset, SparseBatchArray, @@ -145,7 +147,9 @@ def get_batch_size(batch_data: BatchArray) -> int: return n_batches -def split_numpy_array_in_batches(data: np.ndarray, component: ComponentType) -> List[np.ndarray]: +def split_numpy_array_in_batches( + data: Union[DenseBatchArray, SingleArray], component: ComponentType +) -> List[np.ndarray]: """ Split a single dense numpy array into one or more batches diff --git a/src/power_grid_model/core/data_handling.py b/src/power_grid_model/core/data_handling.py index 84fe5859f..d747be652 100644 --- a/src/power_grid_model/core/data_handling.py +++ b/src/power_grid_model/core/data_handling.py @@ -12,7 +12,7 @@ import numpy as np -from power_grid_model.core.dataset_definitions import ComponentType, DataType +from power_grid_model.core.dataset_definitions import ComponentType, DatasetType from power_grid_model.core.power_grid_dataset import CConstDataset, CMutableDataset from power_grid_model.core.power_grid_meta import initialize_array, power_grid_meta_data from power_grid_model.enum import CalculationType @@ -25,9 +25,9 @@ class OutputType(Enum): - asym_output """ - SYM_OUTPUT = DataType.sym_output - ASYM_OUTPUT = DataType.asym_output - SC_OUTPUT = DataType.sc_output + SYM_OUTPUT = DatasetType.sym_output + ASYM_OUTPUT = DatasetType.asym_output + SC_OUTPUT = DatasetType.sc_output def get_output_type(*, calculation_type: CalculationType, symmetric: bool) -> OutputType: @@ -64,7 +64,7 @@ def prepare_input_view(input_data: Mapping[ComponentType, np.ndarray]) -> CConst Returns: instance of CConstDataset ready to be fed into C API """ - return CConstDataset(input_data, dataset_type=DataType.input) + return CConstDataset(input_data, dataset_type=DatasetType.input) def prepare_update_view( @@ -80,7 +80,7 @@ def prepare_update_view( Returns: instance of CConstDataset ready to be fed into C API """ - return CConstDataset(update_data, dataset_type=DataType.update) + return CConstDataset(update_data, dataset_type=DatasetType.update) def prepare_output_view(output_data: Mapping[ComponentType, np.ndarray], output_type: OutputType) -> CMutableDataset: diff --git a/src/power_grid_model/core/dataset_definitions.py b/src/power_grid_model/core/dataset_definitions.py index 4e81ced1a..930b3fe80 100644 --- a/src/power_grid_model/core/dataset_definitions.py +++ b/src/power_grid_model/core/dataset_definitions.py @@ -13,14 +13,14 @@ # pylint: disable=invalid-name -class DataType(str, Enum): +class DatasetType(str, Enum): """ - A DataType is the type of a :class:`Dataset` in power grid model. + A DatasetType is the type of a :class:`Dataset` in power grid model. - Examples: - - DataType.input = "input" - - DataType.update = "update" + - DatasetType.input = "input" + - DatasetType.update = "update" """ input = "input" @@ -29,6 +29,19 @@ class DataType(str, Enum): update = "update" sc_output = "sc_output" + @classmethod + def contains(cls, member): + """ + Check if member is part of the Enum. + + Args: + member: Member to check. + + Returns: + bool: True if the member is part of the Enum, False otherwise. 
+ """ + return member in cls.__members__ + class ComponentType(str, Enum): """ @@ -62,25 +75,25 @@ class ComponentType(str, Enum): # pylint: enable=invalid-name -def _str_to_datatype(data_type: Any) -> DataType: - """Helper function to transform data_type str to DataType.""" +def _str_to_datatype(data_type: Any) -> DatasetType: + """Helper function to transform data_type str to DatasetType.""" if isinstance(data_type, str): - return DataType[data_type] + return DatasetType[data_type] return data_type -def _map_to_datatypes(data: Mapping[Any, Any]) -> Dict[DataType, Any]: - """Helper function to map datatype str keys to DataType.""" +def _map_to_datatypes(data: Mapping[Any, Any]) -> Dict[DatasetType, Any]: + """Helper function to map datatype str keys to DatasetType.""" return {_str_to_datatype(key): value for key, value in data.items()} -def _str_to_componenttype(component: Any) -> ComponentType: +def _str_to_component_type(component: Any) -> ComponentType: """Helper function to transform component str to ComponentType.""" if isinstance(component, str): return ComponentType[component] return component -def _map_to_componenttypes(data: Mapping[Any, Any]) -> Dict[ComponentType, Any]: +def _map_to_component_types(data: Mapping[Any, Any]) -> Dict[ComponentType, Any]: """Helper function to map componenttype str keys to ComponentType.""" - return {_str_to_componenttype(key): value for key, value in data.items()} + return {_str_to_component_type(key): value for key, value in data.items()} diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py index eb9663361..1e6d8ec90 100644 --- a/src/power_grid_model/core/power_grid_dataset.py +++ b/src/power_grid_model/core/power_grid_dataset.py @@ -17,7 +17,7 @@ get_buffer_properties, get_buffer_view, ) -from power_grid_model.core.dataset_definitions import ComponentType, DataType, _str_to_componenttype +from power_grid_model.core.dataset_definitions import ComponentType, DatasetType, _str_to_component_type from power_grid_model.core.error_handling import VALIDATOR_MSG, assert_no_error from power_grid_model.core.power_grid_core import ( ConstDatasetPtr, @@ -92,7 +92,7 @@ def components(self) -> List[ComponentType]: A list of the component names in the dataset """ return [ - _str_to_componenttype(pgc.dataset_info_component_name(self._info, idx)) + _str_to_component_type(pgc.dataset_info_component_name(self._info, idx)) for idx in range(self.n_components()) ] @@ -124,7 +124,7 @@ def total_elements(self) -> Mapping[ComponentType, int]: } -def get_dataset_type(data: Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]]) -> DataType: +def get_dataset_type(data: Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]]) -> DatasetType: """ Deduce the dataset type from the provided dataset. @@ -179,7 +179,7 @@ class CMutableDataset: The dataset will create mutable buffers that the Power Grid Model can use to load data. 
""" - _dataset_type: DataType + _dataset_type: DatasetType _schema: DatasetMetaData _is_batch: bool _batch_size: int @@ -198,7 +198,7 @@ def __new__( instance._mutable_dataset = MutableDatasetPtr() instance._buffer_views = [] - instance._dataset_type = dataset_type if dataset_type in list(DataType) else get_dataset_type(data) + instance._dataset_type = dataset_type if dataset_type in list(DatasetType) else get_dataset_type(data) instance._schema = power_grid_meta_data[instance._dataset_type] if data: @@ -340,7 +340,7 @@ def __new__( Mapping[ComponentType, np.ndarray], Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[DataType] = None, + dataset_type: Optional[DatasetType] = None, ): instance = super().__new__(cls) instance._const_dataset = ConstDatasetPtr() diff --git a/src/power_grid_model/core/power_grid_meta.py b/src/power_grid_model/core/power_grid_meta.py index a6874c7b5..dec9ccc28 100644 --- a/src/power_grid_model/core/power_grid_meta.py +++ b/src/power_grid_model/core/power_grid_meta.py @@ -12,7 +12,12 @@ import numpy as np -from power_grid_model.core.dataset_definitions import ComponentType, DataType, _str_to_componenttype, _str_to_datatype +from power_grid_model.core.dataset_definitions import ( + ComponentType, + DatasetType, + _str_to_component_type, + _str_to_datatype, +) from power_grid_model.core.power_grid_core import AttributePtr, ComponentPtr, DatasetPtr, power_grid_core as pgc @@ -62,7 +67,7 @@ def __getitem__(self, item): DatasetMetaData = Dict[ComponentType, ComponentMetaData] -PowerGridMetaData = Dict[DataType, DatasetMetaData] +PowerGridMetaData = Dict[DatasetType, DatasetMetaData] def _generate_meta_data() -> PowerGridMetaData: @@ -92,7 +97,9 @@ def _generate_meta_dataset(dataset: DatasetPtr) -> DatasetMetaData: n_components = pgc.meta_n_components(dataset) for i in range(n_components): component = pgc.meta_get_component_by_idx(dataset, i) - py_meta_dataset[_str_to_componenttype(pgc.meta_component_name(component))] = _generate_meta_component(component) + py_meta_dataset[_str_to_component_type(pgc.meta_component_name(component))] = _generate_meta_component( + component + ) return py_meta_dataset @@ -158,7 +165,7 @@ def _generate_meta_attributes(component: ComponentPtr) -> dict: def initialize_array( - data_type: Union[str, DataType], + data_type: Union[str, DatasetType], component_type: Union[str, ComponentType], shape: Union[tuple, int], empty: bool = False, @@ -178,7 +185,7 @@ def initialize_array( np structured array with all entries as null value """ data_type = _str_to_datatype(data_type) - component_type = _str_to_componenttype(component_type) + component_type = _str_to_component_type(component_type) if not isinstance(shape, tuple): shape = (shape,) if empty: diff --git a/src/power_grid_model/core/power_grid_model.py b/src/power_grid_model/core/power_grid_model.py index 77383f8d0..2922b5c82 100644 --- a/src/power_grid_model/core/power_grid_model.py +++ b/src/power_grid_model/core/power_grid_model.py @@ -17,7 +17,7 @@ prepare_output_view, prepare_update_view, ) -from power_grid_model.core.dataset_definitions import ComponentType, _map_to_componenttypes, _str_to_componenttype +from power_grid_model.core.dataset_definitions import ComponentType, _map_to_component_types, _str_to_component_type from power_grid_model.core.error_handling import PowerGridBatchError, assert_no_error, handle_errors from power_grid_model.core.index_integer import IdNp, IdxNp from power_grid_model.core.options import Options @@ -113,7 +113,7 @@ 
def __init__( pgc.destroy_model(self._model_ptr) self._all_component_count = None # create new - input_data = _map_to_componenttypes(input_data) + input_data = _map_to_component_types(input_data) prepared_input = prepare_input_view(input_data) self._model_ptr = pgc.create_model(system_frequency, input_data=prepared_input.get_dataset_ptr()) assert_no_error() @@ -132,12 +132,12 @@ def update(self, *, update_data: Union[Dict[ComponentType, np.ndarray], Dict[str Returns: None """ - update_data = _map_to_componenttypes(update_data) + update_data = _map_to_component_types(update_data) prepared_update = prepare_update_view(update_data) pgc.update_model(self._model, prepared_update.get_dataset_ptr()) assert_no_error() - def get_indexer(self, component_type: Any, ids: np.ndarray): + def get_indexer(self, component_type: Union[ComponentType, str], ids: np.ndarray): """ Get array of indexers given array of ids for component type @@ -148,7 +148,7 @@ def get_indexer(self, component_type: Any, ids: np.ndarray): Returns: Array of indexers, same shape as input array ids """ - component_type = _str_to_componenttype(component_type) + component_type = _str_to_component_type(component_type) ids_c = np.ascontiguousarray(ids, dtype=IdNp).ctypes.data_as(IDPtr) indexer = np.empty_like(ids, dtype=IdxNp, order="C") indexer_c = indexer.ctypes.data_as(IdxPtr) @@ -482,7 +482,7 @@ def calculate_power_flow( error_tolerance=error_tolerance, max_iterations=max_iterations, calculation_method=calculation_method, - update_data=(_map_to_componenttypes(update_data) if update_data is not None else None), + update_data=(_map_to_component_types(update_data) if update_data is not None else None), threading=threading, output_component_types=output_component_types, continue_on_batch_error=continue_on_batch_error, @@ -572,7 +572,7 @@ def calculate_state_estimation( error_tolerance=error_tolerance, max_iterations=max_iterations, calculation_method=calculation_method, - update_data=(_map_to_componenttypes(update_data) if update_data is not None else None), + update_data=(_map_to_component_types(update_data) if update_data is not None else None), threading=threading, output_component_types=output_component_types, continue_on_batch_error=continue_on_batch_error, @@ -651,7 +651,7 @@ def calculate_short_circuit( """ return self._calculate_short_circuit( calculation_method=calculation_method, - update_data=(_map_to_componenttypes(update_data) if update_data is not None else None), + update_data=(_map_to_component_types(update_data) if update_data is not None else None), threading=threading, output_component_types=output_component_types, continue_on_batch_error=continue_on_batch_error, diff --git a/src/power_grid_model/core/serialization.py b/src/power_grid_model/core/serialization.py index 28c3bc1d0..553834966 100644 --- a/src/power_grid_model/core/serialization.py +++ b/src/power_grid_model/core/serialization.py @@ -13,7 +13,12 @@ import numpy as np -from power_grid_model.core.dataset_definitions import ComponentType, DataType, _map_to_componenttypes, _str_to_datatype +from power_grid_model.core.dataset_definitions import ( + ComponentType, + DatasetType, + _map_to_component_types, + _str_to_datatype, +) from power_grid_model.core.error_handling import assert_no_error from power_grid_model.core.index_integer import IdxC from power_grid_model.core.power_grid_core import ( @@ -99,7 +104,7 @@ def __new__( Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], serialization_type: SerializationType, - dataset_type: 
Optional[DataType] = None, + dataset_type: Optional[DatasetType] = None, ): instance = super().__new__(cls) @@ -209,7 +214,7 @@ def __new__( Mapping[ComponentType, np.ndarray], Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[DataType] = None, + dataset_type: Optional[DatasetType] = None, ): return super().__new__(cls, data, SerializationType.JSON, dataset_type=dataset_type) @@ -225,7 +230,7 @@ def __new__( Mapping[ComponentType, np.ndarray], Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[DataType] = None, + dataset_type: Optional[DatasetType] = None, ): return super().__new__(cls, data, SerializationType.MSGPACK, dataset_type=dataset_type) @@ -254,7 +259,7 @@ def json_serialize( Mapping[ComponentType, np.ndarray], Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[DataType] = None, + dataset_type: Optional[DatasetType] = None, use_compact_list: bool = False, indent: int = 2, ) -> str: @@ -278,7 +283,7 @@ def json_serialize( Returns: A serialized string containing the dataset. """ - data = _map_to_componenttypes(data) + data = _map_to_component_types(data) dataset_type = _str_to_datatype(dataset_type) result = JsonSerializer(data=data, dataset_type=dataset_type).dump(use_compact_list=use_compact_list, indent=indent) assert_no_error() @@ -309,7 +314,7 @@ def msgpack_serialize( Mapping[ComponentType, np.ndarray], Mapping[ComponentType, Union[np.ndarray, Mapping[str, np.ndarray]]], ], - dataset_type: Optional[DataType] = None, + dataset_type: Optional[DatasetType] = None, use_compact_list: bool = False, ) -> bytes: """ @@ -330,7 +335,7 @@ def msgpack_serialize( Returns: A serialized string containing the dataset. """ - data = _map_to_componenttypes(data) + data = _map_to_component_types(data) dataset_type = _str_to_datatype(dataset_type) result = MsgpackSerializer(data=data, dataset_type=dataset_type).dump(use_compact_list=use_compact_list) assert_no_error() diff --git a/src/power_grid_model/utils.py b/src/power_grid_model/utils.py index ada0ad43d..267009ada 100644 --- a/src/power_grid_model/utils.py +++ b/src/power_grid_model/utils.py @@ -17,7 +17,7 @@ get_and_verify_batch_sizes as _get_and_verify_batch_sizes, get_batch_size as _get_batch_size, ) -from power_grid_model.core.dataset_definitions import DataType, _map_to_componenttypes +from power_grid_model.core.dataset_definitions import DatasetType, _map_to_component_types from power_grid_model.core.power_grid_dataset import get_dataset_type from power_grid_model.core.serialization import ( # pylint: disable=unused-import json_deserialize, @@ -108,7 +108,7 @@ def json_deserialize_from_file(file_path: Path) -> Dataset: def json_serialize_to_file( file_path: Path, data: Dataset, - dataset_type: Optional[DataType] = None, + dataset_type: Optional[DatasetType] = None, use_compact_list: bool = False, indent: Optional[int] = 2, ): @@ -124,7 +124,7 @@ def json_serialize_to_file( Returns: Save to file. 
""" - data = _map_to_componenttypes(data) + data = _map_to_component_types(data) result = json_serialize( data=data, dataset_type=dataset_type, use_compact_list=use_compact_list, indent=-1 if indent is None else indent ) @@ -152,7 +152,7 @@ def msgpack_deserialize_from_file(file_path: Path) -> Dataset: def msgpack_serialize_to_file( - file_path: Path, data: Dataset, dataset_type: Optional[DataType] = None, use_compact_list: bool = False + file_path: Path, data: Dataset, dataset_type: Optional[DatasetType] = None, use_compact_list: bool = False ): """ Export msgpack data in most recent format. @@ -166,7 +166,7 @@ def msgpack_serialize_to_file( Returns: Save to file. """ - data = _map_to_componenttypes(data) + data = _map_to_component_types(data) result = msgpack_serialize(data=data, dataset_type=dataset_type, use_compact_list=use_compact_list) with open(file_path, mode="wb") as file_pointer: @@ -255,7 +255,7 @@ def import_input_data(json_file: Path) -> SingleDataset: """ warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning) - data = _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DataType.input) + data = _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DatasetType.input) assert isinstance(data, dict) assert all(isinstance(component, np.ndarray) and component.ndim == 1 for component in data.values()) return cast_type(SingleDataset, data) @@ -279,11 +279,11 @@ def import_update_data(json_file: Path) -> BatchDataset: return cast_type( BatchDataset, - _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DataType.update), + _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DatasetType.update), ) -def _compatibility_deprecated_import_json_data(json_file: Path, data_type: DataType): +def _compatibility_deprecated_import_json_data(json_file: Path, data_type: DatasetType): with open(json_file, mode="r", encoding="utf-8") as file_pointer: data = json.load(file_pointer) diff --git a/src/power_grid_model/validation/utils.py b/src/power_grid_model/validation/utils.py index d5e4cd90f..e8dd226aa 100644 --- a/src/power_grid_model/validation/utils.py +++ b/src/power_grid_model/validation/utils.py @@ -11,7 +11,7 @@ import numpy as np from power_grid_model import power_grid_meta_data -from power_grid_model.core.dataset_definitions import ComponentType, DataType, _str_to_componenttype +from power_grid_model.core.dataset_definitions import ComponentType, DatasetType, _str_to_component_type from power_grid_model.data_types import SingleDataset from power_grid_model.validation.errors import ValidationError @@ -105,7 +105,7 @@ def update_component_data(component: ComponentType, input_data: np.ndarray, upda for field in update_data.dtype.names: if field == "id": continue - nan = nan_type(component, field, DataType.input) + nan = nan_type(component, field, DatasetType.update) if np.isnan(nan): mask = ~np.isnan(update_data[field]) else: @@ -162,11 +162,11 @@ def errors_to_string( return msg -def nan_type(component: Union[str, ComponentType], field: str, data_type: DataType = DataType.input): +def nan_type(component: Union[str, ComponentType], field: str, data_type: DatasetType = DatasetType.input): """ Helper function to retrieve the nan value for a certain field as defined in the power_grid_meta_data. 
""" - component = _str_to_componenttype(component) + component = _str_to_component_type(component) return power_grid_meta_data[data_type][component].nans[field] diff --git a/src/power_grid_model/validation/validation.py b/src/power_grid_model/validation/validation.py index daeb99582..3883cfc55 100644 --- a/src/power_grid_model/validation/validation.py +++ b/src/power_grid_model/validation/validation.py @@ -15,7 +15,7 @@ import numpy as np -from power_grid_model import ComponentType, DataType, power_grid_meta_data +from power_grid_model import ComponentType, DatasetType, power_grid_meta_data from power_grid_model._utils import convert_batch_dataset_to_batch_list from power_grid_model.data_types import BatchDataset, Dataset, SingleDataset from power_grid_model.enum import ( @@ -85,7 +85,7 @@ def validate_input_data( """ # A deep copy is made of the input data, since default values will be added in the validation process input_data_copy = copy.deepcopy(input_data) - assert_valid_data_structure(input_data_copy, DataType.input) + assert_valid_data_structure(input_data_copy, DatasetType.input) errors: List[ValidationError] = [] errors += validate_required_values(input_data_copy, calculation_type, symmetric) @@ -127,7 +127,7 @@ def validate_batch_data( Raises: Error: KeyError | TypeError | ValueError: if the data structure is invalid. """ - assert_valid_data_structure(input_data, DataType.input) + assert_valid_data_structure(input_data, DatasetType.input) input_errors: List[ValidationError] = list(validate_unique_ids_across_components(input_data)) @@ -136,7 +136,7 @@ def validate_batch_data( errors = {} for batch, batch_update_data in enumerate(batch_data): - assert_valid_data_structure(batch_update_data, DataType.update) + assert_valid_data_structure(batch_update_data, DatasetType.update) id_errors: List[ValidationError] = list(validate_ids_exist(batch_update_data, input_data)) batch_errors = input_errors + id_errors @@ -151,7 +151,7 @@ def validate_batch_data( return errors if errors else None -def assert_valid_data_structure(data: Dataset, data_type: DataType) -> None: +def assert_valid_data_structure(data: Dataset, data_type: DatasetType) -> None: """ Checks if all component names are valid and if the data inside the component matches the required Numpy structured array as defined in the Power Grid Model meta data. 
@@ -164,7 +164,7 @@ def assert_valid_data_structure(data: Dataset, data_type: DataType) -> None: Error: KeyError, TypeError """ - if data_type not in {DataType.input, DataType.update}: + if data_type not in {DatasetType.input, DatasetType.update}: raise KeyError(f"Unexpected data type '{data_type}' (should be 'input' or 'update')") component_dtype = {component: meta.dtype for component, meta in power_grid_meta_data[data_type].items()} diff --git a/tests/unit/test_dataset.py b/tests/unit/test_dataset.py index 5b138adcb..32cd3889d 100644 --- a/tests/unit/test_dataset.py +++ b/tests/unit/test_dataset.py @@ -7,22 +7,22 @@ import numpy as np import pytest -from power_grid_model.core.dataset_definitions import ComponentType, DataType +from power_grid_model.core.dataset_definitions import ComponentType, DatasetType from power_grid_model.core.power_grid_dataset import CConstDataset, get_dataset_type from power_grid_model.core.power_grid_meta import power_grid_meta_data from power_grid_model.errors import PowerGridError def input_dataset_types(): - return [DataType.input] + return [DatasetType.input] def update_dataset_types(): - return [DataType.update] + return [DatasetType.update] def output_dataset_types(): - return [DataType.sym_output, DataType.asym_output, DataType.sc_output] + return [DatasetType.sym_output, DatasetType.asym_output, DatasetType.sc_output] def all_dataset_types(): diff --git a/tests/unit/test_dataset_definitions.py b/tests/unit/test_dataset_definitions.py index 6f6ec92e8..79d096316 100644 --- a/tests/unit/test_dataset_definitions.py +++ b/tests/unit/test_dataset_definitions.py @@ -4,19 +4,19 @@ import pytest -from power_grid_model import ComponentType, DataType, power_grid_meta_data +from power_grid_model import ComponentType, DatasetType, power_grid_meta_data def test_power_grid_data_types(): power_grid_data_types = [data_type for data_type in power_grid_meta_data] - gen_power_grid_data_types = [member.value for member in DataType] + gen_power_grid_data_types = [member.value for member in DatasetType] power_grid_data_types.sort() gen_power_grid_data_types.sort() assert power_grid_data_types == gen_power_grid_data_types def test_power_grid_components(): - power_grid_components = [component for component in power_grid_meta_data[DataType.input]] + power_grid_components = [component for component in power_grid_meta_data[DatasetType.input]] gen_power_grid_components = [member.value for member in ComponentType] power_grid_components.sort() gen_power_grid_components.sort() diff --git a/tests/unit/test_serialization.py b/tests/unit/test_serialization.py index c2f5c48a8..15b66cc2b 100644 --- a/tests/unit/test_serialization.py +++ b/tests/unit/test_serialization.py @@ -9,7 +9,7 @@ import numpy as np import pytest -from power_grid_model import ComponentType, DataType +from power_grid_model import ComponentType, DatasetType from power_grid_model.core.power_grid_dataset import get_dataset_type from power_grid_model.utils import json_deserialize, json_serialize, msgpack_deserialize, msgpack_serialize @@ -33,7 +33,7 @@ def from_msgpack(data): return msgpack.unpackb(data) -def empty_dataset(dataset_type: str = DataType.input): +def empty_dataset(dataset_type: DatasetType = DatasetType.input): return {"version": "1.0", "type": dataset_type, "is_batch": False, "attributes": {}, "data": {}} @@ -370,7 +370,8 @@ def test_msgpack_deserialize_data(serialized_data): @pytest.mark.parametrize( - "dataset_type", (DataType.input, DataType.update, DataType.sym_output, DataType.asym_output, 
DataType.sc_output) + "dataset_type", + (DatasetType.input, DatasetType.update, DatasetType.sym_output, DatasetType.asym_output, DatasetType.sc_output), ) @pytest.mark.parametrize("use_compact_list", (True, False)) def test_json_serialize_empty_dataset(dataset_type, use_compact_list: bool): @@ -387,7 +388,8 @@ def test_json_serialize_empty_dataset(dataset_type, use_compact_list: bool): @pytest.mark.parametrize( - "dataset_type", (DataType.input, DataType.update, DataType.sym_output, DataType.asym_output, DataType.sc_output) + "dataset_type", + (DatasetType.input, DatasetType.update, DatasetType.sym_output, DatasetType.asym_output, DatasetType.sc_output), ) def test_msgpack_serialize_empty_dataset(dataset_type): reference = empty_dataset(dataset_type) diff --git a/tests/unit/utils.py b/tests/unit/utils.py index 404bc0db7..0eb7d0c29 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -11,7 +11,7 @@ import numpy as np import pytest -from power_grid_model.core.dataset_definitions import DataType +from power_grid_model.core.dataset_definitions import DatasetType from power_grid_model.core.power_grid_model import PowerGridModel from power_grid_model.data_types import Dataset, PythonDataset, SingleDataset from power_grid_model.errors import ( @@ -285,7 +285,7 @@ def compare_result(actual: SingleDataset, expected: SingleDataset, rtol: float, ) -def convert_python_to_numpy(data: PythonDataset, data_type: DataType) -> Dataset: +def convert_python_to_numpy(data: PythonDataset, data_type: DatasetType) -> Dataset: """ Convert native python data to internal numpy diff --git a/tests/unit/validation/test_input_validation.py b/tests/unit/validation/test_input_validation.py index c8c84003d..7a02be608 100644 --- a/tests/unit/validation/test_input_validation.py +++ b/tests/unit/validation/test_input_validation.py @@ -11,7 +11,7 @@ Branch3Side, BranchSide, ComponentType, - DataType, + DatasetType, LoadGenType, MeasuredTerminalType, WindingType, @@ -41,11 +41,11 @@ @pytest.fixture def input_data() -> Dict[ComponentType, np.ndarray]: - node = initialize_array(DataType.input, ComponentType.node, 4) + node = initialize_array(DatasetType.input, ComponentType.node, 4) node["id"] = [0, 2, 1, 2] node["u_rated"] = [10.5e3, 10.5e3, 0, 10.5e3] - line = initialize_array(DataType.input, ComponentType.line, 3) + line = initialize_array(DatasetType.input, ComponentType.line, 3) line["id"] = [3, 4, 5] line["from_node"] = [0, -1, 2] line["to_node"] = [2, 1, 8] @@ -57,14 +57,14 @@ def input_data() -> Dict[ComponentType, np.ndarray]: line["x0"] = [0, 0, 50] line["i_n"] = [-3, 0, 50] - link = initialize_array(DataType.input, ComponentType.link, 2) + link = initialize_array(DatasetType.input, ComponentType.link, 2) link["id"] = [12, 13] link["from_node"] = [0, -1] link["to_node"] = [8, 1] link["from_status"] = [3, 1] link["to_status"] = [0, 4] - transformer = initialize_array(DataType.input, ComponentType.transformer, 3) + transformer = initialize_array(DatasetType.input, ComponentType.transformer, 3) transformer["id"] = [1, 14, 15] transformer["from_node"] = [1, 7, 2] # TODO check from node 1 to node 1 transformer["to_node"] = [1, 8, 1] @@ -91,7 +91,7 @@ def input_data() -> Dict[ComponentType, np.ndarray]: transformer["pk_min"] = [300.0, 0.0, nan_type("transformer", "pk_min")] transformer["pk_max"] = [400.0, -0.1, nan_type("transformer", "pk_max")] - three_winding_transformer = initialize_array(DataType.input, ComponentType.three_winding_transformer, 4) + three_winding_transformer = initialize_array(DatasetType.input, 
ComponentType.three_winding_transformer, 4) three_winding_transformer["id"] = [1, 28, 29, 30] three_winding_transformer["node_1"] = [0, 1, 9, 2] three_winding_transformer["node_2"] = [1, 15, 1, 0] @@ -167,7 +167,7 @@ def input_data() -> Dict[ComponentType, np.ndarray]: three_winding_transformer["pk_13_max"] = [-40, nan_type("three_winding_transformer", "pk_12_max"), 40, 50] three_winding_transformer["pk_23_max"] = [-120, nan_type("three_winding_transformer", "pk_12_max"), 40, 30] - transformer_tap_regulator = initialize_array(DataType.input, ComponentType.transformer_tap_regulator, 5) + transformer_tap_regulator = initialize_array(DatasetType.input, ComponentType.transformer_tap_regulator, 5) transformer_tap_regulator["id"] = [51, 52, 53, 54, 1] transformer_tap_regulator["status"] = [0, -1, 2, 1, 5] transformer_tap_regulator["regulated_object"] = [14, 15, 28, 14, 2] @@ -177,7 +177,7 @@ def input_data() -> Dict[ComponentType, np.ndarray]: transformer_tap_regulator["line_drop_compensation_r"] = [0.0, -1.0, 1.0, 0.0, 2.0] transformer_tap_regulator["line_drop_compensation_x"] = [0.0, 4.0, 2.0, 0.0, -4.0] - source = initialize_array(DataType.input, ComponentType.source, 3) + source = initialize_array(DatasetType.input, ComponentType.source, 3) source["id"] = [16, 17, 1] source["node"] = [10, 1, 2] source["status"] = [0, -1, 2] @@ -186,42 +186,42 @@ def input_data() -> Dict[ComponentType, np.ndarray]: source["rx_ratio"] = [0.0, -30.0, 300.0] source["z01_ratio"] = [-1.0, 0.0, 200.0] - shunt = initialize_array(DataType.input, ComponentType.shunt, 3) + shunt = initialize_array(DatasetType.input, ComponentType.shunt, 3) shunt["id"] = [18, 19, 1] shunt["node"] = [10, 1, 2] shunt["status"] = [0, -1, 2] - sym_load = initialize_array(DataType.input, ComponentType.sym_load, 3) + sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 3) sym_load["id"] = [1, 20, 21] sym_load["type"] = [1, 0, 5] sym_load["node"] = [10, 1, 2] sym_load["status"] = [0, -1, 2] - sym_gen = initialize_array(DataType.input, ComponentType.sym_gen, 3) + sym_gen = initialize_array(DatasetType.input, ComponentType.sym_gen, 3) sym_gen["id"] = [1, 22, 23] sym_gen["type"] = [2, -1, 1] sym_gen["node"] = [10, 1, 2] sym_gen["status"] = [0, -1, 2] - asym_load = initialize_array(DataType.input, ComponentType.asym_load, 3) + asym_load = initialize_array(DatasetType.input, ComponentType.asym_load, 3) asym_load["id"] = [1, 24, 25] asym_load["type"] = [5, 0, 2] asym_load["node"] = [10, 1, 2] asym_load["status"] = [0, -1, 2] - asym_gen = initialize_array(DataType.input, ComponentType.asym_gen, 3) + asym_gen = initialize_array(DatasetType.input, ComponentType.asym_gen, 3) asym_gen["id"] = [1, 26, 27] asym_gen["type"] = [-1, 5, 2] asym_gen["node"] = [10, 1, 2] asym_gen["status"] = [0, -1, 2] - sym_voltage_sensor = initialize_array(DataType.input, ComponentType.sym_voltage_sensor, 4) + sym_voltage_sensor = initialize_array(DatasetType.input, ComponentType.sym_voltage_sensor, 4) sym_voltage_sensor["id"] = [7, 8, 9, 10] sym_voltage_sensor["measured_object"] = [2, 3, 1, 200] sym_voltage_sensor["u_measured"] = [0.0, 10.4e3, 10.6e3, -20.0] sym_voltage_sensor["u_sigma"] = [1.0, np.nan, 0.0, -1.0] - asym_voltage_sensor = initialize_array(DataType.input, ComponentType.asym_voltage_sensor, 4) + asym_voltage_sensor = initialize_array(DatasetType.input, ComponentType.asym_voltage_sensor, 4) asym_voltage_sensor["id"] = [7, 8, 9, 10] asym_voltage_sensor["measured_object"] = [2, 3, 1, 200] asym_voltage_sensor["u_measured"] = [ @@ -232,19 +232,19 @@ 
def input_data() -> Dict[ComponentType, np.ndarray]: ] asym_voltage_sensor["u_sigma"] = [1.0, np.nan, 0.0, -1.0] - sym_power_sensor = initialize_array(DataType.input, ComponentType.sym_power_sensor, 4) + sym_power_sensor = initialize_array(DatasetType.input, ComponentType.sym_power_sensor, 4) sym_power_sensor["id"] = [7, 8, 9, 10] sym_power_sensor["measured_object"] = [12, 3, 13, 200] sym_power_sensor["power_sigma"] = [1.0, np.nan, 0.0, -1.0] sym_power_sensor["measured_terminal_type"] = [1, 1, 10, 1] - asym_power_sensor = initialize_array(DataType.input, ComponentType.asym_power_sensor, 4) + asym_power_sensor = initialize_array(DatasetType.input, ComponentType.asym_power_sensor, 4) asym_power_sensor["id"] = [7, 8, 9, 10] asym_power_sensor["measured_object"] = [12, 3, 13, 200] asym_power_sensor["power_sigma"] = [1.0, np.nan, 0.0, -1.0] asym_power_sensor["measured_terminal_type"] = [1, 1, 10, 1] - fault = initialize_array(DataType.input, ComponentType.fault, 20) + fault = initialize_array(DatasetType.input, ComponentType.fault, 20) fault["id"] = [1] + list(range(32, 51)) fault["status"] = [0, -1, 2] + 17 * [1] fault["fault_type"] = 6 * [0] + 4 * [1] + 4 * [2] + 4 * [3] + [nan_type("fault", "fault_type"), 4] diff --git a/tests/unit/validation/test_validation_functions.py b/tests/unit/validation/test_validation_functions.py index ba41b66d8..078dd2b0e 100644 --- a/tests/unit/validation/test_validation_functions.py +++ b/tests/unit/validation/test_validation_functions.py @@ -10,7 +10,7 @@ import pytest from power_grid_model import CalculationType, LoadGenType, MeasuredTerminalType, initialize_array, power_grid_meta_data -from power_grid_model.core.dataset_definitions import ComponentType, DataType +from power_grid_model.core.dataset_definitions import ComponentType, DatasetType from power_grid_model.enum import Branch3Side, BranchSide, CalculationType, FaultType, TapChangingStrategy from power_grid_model.validation import assert_valid_input_data from power_grid_model.validation.errors import ( @@ -34,7 +34,7 @@ validate_values, ) -NaN = power_grid_meta_data[DataType.input][ComponentType.node].nans["id"] +NaN = power_grid_meta_data[DatasetType.input][ComponentType.node].nans["id"] def test_assert_valid_data_structure(): From 601a7a81f488c202dbb5c25f80a55cc21f2c29ac Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Fri, 28 Jun 2024 13:38:22 +0200 Subject: [PATCH 41/47] Resolved review comments. 
Signed-off-by: Santiago Figueroa --- code_generation/code_gen.py | 4 +--- ...etadata_enums.py.jinja => dataset_class_maps.py.jinja} | 8 +++----- src/power_grid_model/core/dataset_definitions.py | 1 - 3 files changed, 4 insertions(+), 9 deletions(-) rename code_generation/templates/src/power_grid_model/core/{metadata_enums.py.jinja => dataset_class_maps.py.jinja} (93%) diff --git a/code_generation/code_gen.py b/code_generation/code_gen.py index 109182fa0..32ec49a3d 100644 --- a/code_generation/code_gen.py +++ b/code_generation/code_gen.py @@ -120,7 +120,6 @@ def code_gen(self): render_funcs = { "attribute_classes": self.render_attribute_classes, "dataset_class_maps": self.render_dataset_class_maps, - "metadata_enums": self.render_dataset_class_maps, } # render attribute classes @@ -128,8 +127,7 @@ def code_gen(self): for template_path in TEMPLATE_DIR.rglob(f"{template_name}.*.jinja"): output_suffix = template_path.with_suffix("").suffix output_dir = template_path.parent.relative_to(TEMPLATE_DIR) - data_name = template_name if template_name != "metadata_enums" else "dataset_class_maps" - for data_path in DATA_DIR.glob(f"{data_name}/*.json"): + for data_path in DATA_DIR.glob(f"{template_name}/*.json"): output_path = self.base_output_path / output_dir / data_path.with_suffix(output_suffix).name output_path.parent.mkdir(parents=True, exist_ok=True) print(f"Generating file: {output_path}") diff --git a/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja b/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja similarity index 93% rename from code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja rename to code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja index 934d40b2e..ed02a6b17 100644 --- a/code_generation/templates/src/power_grid_model/core/metadata_enums.py.jinja +++ b/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja @@ -6,8 +6,8 @@ # This file is automatically generated. DO NOT modify it manually! -{% set dataset_types = all_map.keys() %} -{% set components = all_map['input'].keys() %} +{%- set dataset_types = all_map.keys() %} +{%- set components = all_map['input'].keys() %} from enum import Enum from typing import Any, Dict, Mapping @@ -15,8 +15,6 @@ from typing import Any, Dict, Mapping # pylint: disable=invalid-name -from enum import Enum - class DatasetType(str, Enum): """ A DatasetType is the type of a :class:`Dataset` in power grid model. @@ -42,7 +40,7 @@ class DatasetType(str, Enum): Returns: bool: True if the member is part of the Enum, False otherwise. """ - return member in cls._member_map_ + return member in cls.__members__ class ComponentType(str, Enum): diff --git a/src/power_grid_model/core/dataset_definitions.py b/src/power_grid_model/core/dataset_definitions.py index 930b3fe80..e5d4beb98 100644 --- a/src/power_grid_model/core/dataset_definitions.py +++ b/src/power_grid_model/core/dataset_definitions.py @@ -6,7 +6,6 @@ # This file is automatically generated. DO NOT modify it manually! - from enum import Enum from typing import Any, Dict, Mapping From 6e50b203d1a3d98aa4d21da086be6775d4284fe0 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Fri, 28 Jun 2024 15:27:47 +0200 Subject: [PATCH 42/47] Resolved review comments. 
Signed-off-by: Santiago Figueroa --- docs/examples/Make Test Dataset.ipynb | 8 ++++---- scripts/quick_example.py | 4 +++- scripts/quick_example_batch.py | 4 +++- src/power_grid_model/core/data_handling.py | 5 +++-- tests/unit/test_dataset_definitions.py | 20 ++++++++++---------- 5 files changed, 23 insertions(+), 18 deletions(-) diff --git a/docs/examples/Make Test Dataset.ipynb b/docs/examples/Make Test Dataset.ipynb index 512e537a9..01bc6b60a 100644 --- a/docs/examples/Make Test Dataset.ipynb +++ b/docs/examples/Make Test Dataset.ipynb @@ -153,7 +153,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "id": "b158e92f", "metadata": {}, "outputs": [], @@ -223,7 +223,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "id": "724e098a", "metadata": {}, "outputs": [], @@ -238,7 +238,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "id": "071c790a", "metadata": {}, "outputs": [ @@ -440,7 +440,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.14" + "version": "3.12.2" }, "vscode": { "interpreter": { diff --git a/scripts/quick_example.py b/scripts/quick_example.py index 2588b2aeb..b5cbcefb0 100644 --- a/scripts/quick_example.py +++ b/scripts/quick_example.py @@ -7,7 +7,9 @@ from power_grid_model import ComponentType, DatasetType, LoadGenType, PowerGridModel, initialize_array # node -node = initialize_array(DatasetType.input, ComponentType.node, 2) # initialize_array("input", "node", 3) is also OK +node = initialize_array(DatasetType.input, ComponentType.node, 2) +# The following is also supported +# node = initialize_array("input", "node", 2) node["id"] = [1, 2] node["u_rated"] = [10.5e3, 10.5e3] diff --git a/scripts/quick_example_batch.py b/scripts/quick_example_batch.py index 13c0b25bb..1889e4e0b 100644 --- a/scripts/quick_example_batch.py +++ b/scripts/quick_example_batch.py @@ -15,7 +15,9 @@ """ # node -node = initialize_array(DatasetType.input, ComponentType.node, 3) # initialize_array("input", "node", 3) is also OK +node = initialize_array(DatasetType.input, ComponentType.node, 3) +# The following is also supported +# node = initialize_array("input", "node", 3) node["id"] = [1, 2, 7] node["u_rated"] = [10.5e3, 10.5e3, 10.5e3] diff --git a/src/power_grid_model/core/data_handling.py b/src/power_grid_model/core/data_handling.py index d747be652..0dcf33b3b 100644 --- a/src/power_grid_model/core/data_handling.py +++ b/src/power_grid_model/core/data_handling.py @@ -21,8 +21,9 @@ class OutputType(Enum): """ The different supported output types: - - sym_output - - asym_output + - DatasetType.sym_output + - DatasetType.asym_output + - DatasetType.sc_output """ SYM_OUTPUT = DatasetType.sym_output diff --git a/tests/unit/test_dataset_definitions.py b/tests/unit/test_dataset_definitions.py index 79d096316..190889d92 100644 --- a/tests/unit/test_dataset_definitions.py +++ b/tests/unit/test_dataset_definitions.py @@ -7,17 +7,17 @@ from power_grid_model import ComponentType, DatasetType, power_grid_meta_data +def assert_data_type(pgm_meta_data_types, data_type): + pgm_types = [pgm_type for pgm_type in pgm_meta_data_types] + pgm_types.sort() + generated_types = [member.value for member in data_type] + generated_types.sort() + assert pgm_types == generated_types + + def test_power_grid_data_types(): - power_grid_data_types = [data_type for data_type in power_grid_meta_data] - gen_power_grid_data_types = [member.value for member in DatasetType] - 
power_grid_data_types.sort() - gen_power_grid_data_types.sort() - assert power_grid_data_types == gen_power_grid_data_types + assert_data_type(power_grid_meta_data, DatasetType) def test_power_grid_components(): - power_grid_components = [component for component in power_grid_meta_data[DatasetType.input]] - gen_power_grid_components = [member.value for member in ComponentType] - power_grid_components.sort() - gen_power_grid_components.sort() - assert power_grid_components == gen_power_grid_components + assert_data_type(power_grid_meta_data[DatasetType.input], ComponentType) From ad7187088a5f177d81d610168b720b17f7281a95 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Mon, 1 Jul 2024 10:01:39 +0200 Subject: [PATCH 43/47] Resolved review comments Signed-off-by: Santiago Figueroa --- .../src/power_grid_model/core/dataset_class_maps.py.jinja | 2 +- src/power_grid_model/core/dataset_definitions.py | 2 +- src/power_grid_model/core/power_grid_dataset.py | 2 +- src/power_grid_model/core/power_grid_model.py | 8 ++++---- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja b/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja index ed02a6b17..b50c12385 100644 --- a/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja +++ b/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja @@ -30,7 +30,7 @@ class DatasetType(str, Enum): {%- endfor %} @classmethod - def contains(cls, member): + def __contains__(cls, member): """ Check if member is part of the Enum. diff --git a/src/power_grid_model/core/dataset_definitions.py b/src/power_grid_model/core/dataset_definitions.py index e5d4beb98..2c021958b 100644 --- a/src/power_grid_model/core/dataset_definitions.py +++ b/src/power_grid_model/core/dataset_definitions.py @@ -29,7 +29,7 @@ class DatasetType(str, Enum): sc_output = "sc_output" @classmethod - def contains(cls, member): + def __contains__(cls, member): """ Check if member is part of the Enum. diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py index 1e6d8ec90..c4b1b0882 100644 --- a/src/power_grid_model/core/power_grid_dataset.py +++ b/src/power_grid_model/core/power_grid_dataset.py @@ -198,7 +198,7 @@ def __new__( instance._mutable_dataset = MutableDatasetPtr() instance._buffer_views = [] - instance._dataset_type = dataset_type if dataset_type in list(DatasetType) else get_dataset_type(data) + instance._dataset_type = dataset_type if DatasetType.__contains__(dataset_type) else get_dataset_type(data) instance._schema = power_grid_meta_data[instance._dataset_type] if data: diff --git a/src/power_grid_model/core/power_grid_model.py b/src/power_grid_model/core/power_grid_model.py index 2922b5c82..fd41c5e54 100644 --- a/src/power_grid_model/core/power_grid_model.py +++ b/src/power_grid_model/core/power_grid_model.py @@ -6,7 +6,7 @@ Main power grid model class """ from enum import IntEnum -from typing import Any, Dict, List, Optional, Set, Type, Union +from typing import Dict, List, Optional, Set, Type, Union import numpy as np @@ -414,7 +414,7 @@ def calculate_power_flow( continue_on_batch_error: bool = False, decode_error: bool = True, tap_changing_strategy: Union[TapChangingStrategy, str] = TapChangingStrategy.disabled, - ) -> Dict[Any, np.ndarray]: + ) -> Dict[ComponentType, np.ndarray]: """ Calculate power flow once with the current model attributes. 
Or calculate in batch with the given update dataset in batch. @@ -507,7 +507,7 @@ def calculate_state_estimation( output_component_types: Optional[Union[Set[ComponentType], List[ComponentType]]] = None, continue_on_batch_error: bool = False, decode_error: bool = True, - ) -> Dict[Any, np.ndarray]: + ) -> Dict[ComponentType, np.ndarray]: """ Calculate state estimation once with the current model attributes. Or calculate in batch with the given update dataset in batch. @@ -594,7 +594,7 @@ def calculate_short_circuit( continue_on_batch_error: bool = False, decode_error: bool = True, short_circuit_voltage_scaling: Union[ShortCircuitVoltageScaling, str] = ShortCircuitVoltageScaling.maximum, - ) -> Dict[Any, np.ndarray]: + ) -> Dict[ComponentType, np.ndarray]: """ Calculate a short circuit once with the current model attributes. Or calculate in batch with the given update dataset in batch From aba3aa16107b7571be15be7fbeaf1b8f55dc31f8 Mon Sep 17 00:00:00 2001 From: Santiago Figueroa Date: Mon, 1 Jul 2024 10:26:19 +0200 Subject: [PATCH 44/47] Resolved review comments Signed-off-by: Santiago Figueroa --- .../core/dataset_definitions.py | 31 ++++++++++--------- .../core/power_grid_dataset.py | 2 +- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/src/power_grid_model/core/dataset_definitions.py b/src/power_grid_model/core/dataset_definitions.py index 2c021958b..6857e6de0 100644 --- a/src/power_grid_model/core/dataset_definitions.py +++ b/src/power_grid_model/core/dataset_definitions.py @@ -6,13 +6,27 @@ # This file is automatically generated. DO NOT modify it manually! -from enum import Enum +from enum import Enum, EnumMeta from typing import Any, Dict, Mapping # pylint: disable=invalid-name -class DatasetType(str, Enum): +class _MetaEnum(EnumMeta): + def __contains__(cls, member): + """ + Check if member is part of the Enum. + + Args: + member: Member to check. + + Returns: + bool: True if the member is part of the Enum, False otherwise. + """ + return member in cls.__members__.keys() + + +class DatasetType(str, Enum, metaclass=_MetaEnum): """ A DatasetType is the type of a :class:`Dataset` in power grid model. @@ -28,19 +42,6 @@ class DatasetType(str, Enum): update = "update" sc_output = "sc_output" - @classmethod - def __contains__(cls, member): - """ - Check if member is part of the Enum. - - Args: - member: Member to check. - - Returns: - bool: True if the member is part of the Enum, False otherwise. 
-        """
-        return member in cls.__members__
-
 
 class ComponentType(str, Enum):
     """
diff --git a/src/power_grid_model/core/power_grid_dataset.py b/src/power_grid_model/core/power_grid_dataset.py
index c4b1b0882..0f48501f5 100644
--- a/src/power_grid_model/core/power_grid_dataset.py
+++ b/src/power_grid_model/core/power_grid_dataset.py
@@ -198,7 +198,7 @@ def __new__(
         instance._mutable_dataset = MutableDatasetPtr()
         instance._buffer_views = []
 
-        instance._dataset_type = dataset_type if DatasetType.__contains__(dataset_type) else get_dataset_type(data)
+        instance._dataset_type = dataset_type if dataset_type in DatasetType else get_dataset_type(data)
         instance._schema = power_grid_meta_data[instance._dataset_type]
 
         if data:

From 23b3808dc335cc584a227ceadae5b1c1536f954c Mon Sep 17 00:00:00 2001
From: Santiago Figueroa
Date: Mon, 1 Jul 2024 10:39:50 +0200
Subject: [PATCH 45/47] Resolved review comments

Signed-off-by: Santiago Figueroa
---
 .../core/dataset_class_maps.py.jinja          | 44 ++++++++++---------
 .../core/dataset_definitions.py               |  4 ++
 2 files changed, 28 insertions(+), 20 deletions(-)

diff --git a/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja b/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja
index b50c12385..4fa699669 100644
--- a/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja
+++ b/code_generation/templates/src/power_grid_model/core/dataset_class_maps.py.jinja
@@ -9,27 +9,14 @@
 {%- set dataset_types = all_map.keys() %}
 {%- set components = all_map['input'].keys() %}
 
-from enum import Enum
+from enum import Enum, EnumMeta
 from typing import Any, Dict, Mapping
 
 # pylint: disable=invalid-name
 
+# fmt: off
 
-class DatasetType(str, Enum):
-    """
-    A DatasetType is the type of a :class:`Dataset` in power grid model.
-
-
-    Examples:
-
-        - DatasetType.input = "input"
-        - DatasetType.update = "update"
-    """
-
-    {%- for dataset_type in dataset_types %}
-    {{ dataset_type }} = "{{ dataset_type }}"
-    {%- endfor %}
-
-    @classmethod
+class _MetaEnum(EnumMeta):
     def __contains__(cls, member):
         """
         Check if member is part of the Enum.
@@ -40,7 +27,21 @@ class DatasetType(str, Enum):
         Returns:
             bool: True if the member is part of the Enum, False otherwise.
         """
-        return member in cls.__members__
+        return member in cls.__members__.keys()
+
+
+class DatasetType(str, Enum, metaclass=_MetaEnum):
+    """
+    A DatasetType is the type of a :class:`Dataset` in power grid model.
+
+    - Examples:
+
+        - DatasetType.input = "input"
+        - DatasetType.update = "update"
+    """
+{% for dataset_type in dataset_types %}
+    {{ dataset_type }} = "{{ dataset_type }}"
+{%- endfor %}
 
 
 class ComponentType(str, Enum):
@@ -52,10 +53,9 @@ class ComponentType(str, Enum):
         - ComponentType.node = "node"
         - ComponentType.line = "line"
     """
-
-    {%- for component in components %}
+{% for component in components %}
     {{ component }} = "{{ component }}"
-    {%- endfor %}
+{%- endfor %}
 
 
 # pylint: enable=invalid-name
@@ -83,3 +83,7 @@ def _str_to_component_type(component: Any) -> ComponentType:
 def _map_to_component_types(data: Mapping[Any, Any]) -> Dict[ComponentType, Any]:
     """Helper function to map component type str keys to ComponentType."""
     return {_str_to_component_type(key): value for key, value in data.items()}
+
+
+# fmt: on
+
diff --git a/src/power_grid_model/core/dataset_definitions.py b/src/power_grid_model/core/dataset_definitions.py
index 6857e6de0..d8f0c2a93 100644
--- a/src/power_grid_model/core/dataset_definitions.py
+++ b/src/power_grid_model/core/dataset_definitions.py
@@ -11,6 +11,7 @@
 
 # pylint: disable=invalid-name
 
+# fmt: off
 
 class _MetaEnum(EnumMeta):
     def __contains__(cls, member):
@@ -97,3 +98,6 @@ def _str_to_component_type(component: Any) -> ComponentType:
 def _map_to_component_types(data: Mapping[Any, Any]) -> Dict[ComponentType, Any]:
     """Helper function to map component type str keys to ComponentType."""
     return {_str_to_component_type(key): value for key, value in data.items()}
+
+
+# fmt: on

From eef66698a66acfd0745fb5c13415445f92432bbb Mon Sep 17 00:00:00 2001
From: Santiago Figueroa
Date: Mon, 1 Jul 2024 10:48:29 +0200
Subject: [PATCH 46/47] Fixed test error

Signed-off-by: Santiago Figueroa
---
 tests/unit/test_power_grid_model.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/unit/test_power_grid_model.py b/tests/unit/test_power_grid_model.py
index fed2415ea..6a9227edb 100644
--- a/tests/unit/test_power_grid_model.py
+++ b/tests/unit/test_power_grid_model.py
@@ -153,8 +153,8 @@ def test_batch_calculation_error_continue(model: PowerGridModel, case_data):
     assert np.allclose(error.succeeded_scenarios, [0])
     assert "The id cannot be found:" in error.error_messages[0]
     # assert value result for scenario 0
-    result = {"node": result["node"][error.succeeded_scenarios, :]}
-    expected_result = {ComponentType.node: case_data["output_batch"]["node"][error.succeeded_scenarios, :]}
+    result = {ComponentType.node: result[ComponentType.node][error.succeeded_scenarios, :]}
+    expected_result = {ComponentType.node: case_data["output_batch"][ComponentType.node][error.succeeded_scenarios, :]}
     compare_result(result, expected_result, rtol=0.0, atol=1e-8)
     # general error before the batch
    with pytest.raises(PowerGridError, match="The calculation method is invalid for this calculation!"):

From db2c85caacca3da35001a1a8da30da1a16d628ba Mon Sep 17 00:00:00 2001
From: Santiago Figueroa
Date: Mon, 1 Jul 2024 11:35:22 +0200
Subject: [PATCH 47/47] Minor version bump

Signed-off-by: Santiago Figueroa
---
 VERSION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/VERSION b/VERSION
index 625934097..2e0e38c63 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.8
+1.9
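A note on the pattern that patches 43-45 converge on: the "in" operator applied to a class is resolved on the class's metaclass, so the __contains__ hook has to live on an EnumMeta subclass. A classmethod named __contains__ in the Enum body (the intermediate state after patch 43) is never consulted by "member in DatasetType" and can only be invoked explicitly, which is why patch 44 moves it onto _MetaEnum. Below is a minimal standalone sketch of the mechanism; it is not code from these patches, needs only the Python standard library, and trims the member list for brevity:

    from enum import Enum, EnumMeta


    class _MetaEnum(EnumMeta):
        def __contains__(cls, member):
            # Name-based membership: cls.__members__ maps member names to
            # members, so plain strings match by key, and str-mixin members
            # match too because they hash and compare like their name/value.
            return member in cls.__members__.keys()


    class DatasetType(str, Enum, metaclass=_MetaEnum):
        input = "input"
        update = "update"


    assert "input" in DatasetType              # a plain string is accepted
    assert DatasetType.update in DatasetType   # a member works as well (name == value here)
    assert "sym_output" not in DatasetType     # not defined in this trimmed sketch

This is what lets patch 44 simplify the guard in power_grid_dataset.py to "dataset_type if dataset_type in DatasetType else get_dataset_type(data)", accepting either a DatasetType member or its string name without an explicit DatasetType.__contains__(...) call.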