diff --git a/src/power_grid_model_io/config/excel/vision_en.yaml b/src/power_grid_model_io/config/excel/vision_en.yaml index ba9219a3..9da86b49 100644 --- a/src/power_grid_model_io/config/excel/vision_en.yaml +++ b/src/power_grid_model_io/config/excel/vision_en.yaml @@ -424,6 +424,11 @@ grid: extra: - ID - Name + filters: + - power_grid_model_io.functions.filters.exclude_all_columns_empty_or_zero: + cols: + - Load.P + - Load.Q sym_gen: - id: auto_id: @@ -444,6 +449,11 @@ grid: extra: - ID - Name + filters: + - power_grid_model_io.functions.filters.exclude_all_columns_empty_or_zero: + cols: + - Generation.P + - Generation.Q - id: auto_id: name: pv_generation @@ -466,6 +476,10 @@ grid: extra: - ID - Name + filters: + - power_grid_model_io.functions.filters.exclude_all_columns_empty_or_zero: + cols: + - PV.Pnom Sources: source: id: diff --git a/src/power_grid_model_io/config/excel/vision_en_9_7.yaml b/src/power_grid_model_io/config/excel/vision_en_9_7.yaml index 62839ee5..e7c59868 100644 --- a/src/power_grid_model_io/config/excel/vision_en_9_7.yaml +++ b/src/power_grid_model_io/config/excel/vision_en_9_7.yaml @@ -425,6 +425,11 @@ grid: extra: - ID - Name + filters: + - power_grid_model_io.functions.filters.exclude_all_columns_empty_or_zero: + cols: + - Load.P + - Load.Q sym_gen: - id: auto_id: @@ -445,6 +450,11 @@ grid: extra: - ID - Name + filters: + - power_grid_model_io.functions.filters.exclude_all_columns_empty_or_zero: + cols: + - Generation.P + - Generation.Q - id: auto_id: name: pv_generation @@ -467,6 +477,10 @@ grid: extra: - ID - Name + filters: + - power_grid_model_io.functions.filters.exclude_all_columns_empty_or_zero: + cols: + - PV.Pnom Sources: source: id: diff --git a/src/power_grid_model_io/config/excel/vision_nl.yaml b/src/power_grid_model_io/config/excel/vision_nl.yaml index 92754770..d1981235 100644 --- a/src/power_grid_model_io/config/excel/vision_nl.yaml +++ b/src/power_grid_model_io/config/excel/vision_nl.yaml @@ -415,6 +415,11 @@ grid: extra: 
- ID - Naam + filters: + - power_grid_model_io.functions.filters.exclude_all_columns_empty_or_zero: + cols: + - Belasting.P + - Belasting.Q sym_gen: - id: auto_id: @@ -435,6 +440,11 @@ grid: extra: - ID - Naam + filters: + - power_grid_model_io.functions.filters.exclude_all_columns_empty_or_zero: + cols: + - Opwekking.P + - Opwekking.Q - id: auto_id: name: pv_generation @@ -457,6 +467,10 @@ grid: extra: - ID - Naam + filters: + - power_grid_model_io.functions.filters.exclude_all_columns_empty_or_zero: + cols: + - PV.Pnom Netvoedingen: source: id: diff --git a/src/power_grid_model_io/converters/tabular_converter.py b/src/power_grid_model_io/converters/tabular_converter.py index 29c5871e..349d559e 100644 --- a/src/power_grid_model_io/converters/tabular_converter.py +++ b/src/power_grid_model_io/converters/tabular_converter.py @@ -178,7 +178,12 @@ def _convert_table_to_component( if table not in data: return None - n_records = len(data[table]) + if "filters" in attributes: + table_mask = self._parse_table_filters(data=data, table=table, filtering_functions=attributes["filters"]) + else: + table_mask = None + + n_records = np.sum(table_mask) if table_mask is not None else len(data[table]) try: pgm_data = initialize_array(data_type=data_type, component_type=component, shape=n_records) @@ -189,7 +194,8 @@ def _convert_table_to_component( raise KeyError(f"No mapping for the attribute 'id' for '{component}s'!") # Make sure that the "id" column is always parsed first (at least before "extra" is parsed) - sorted_attributes = sorted(attributes.items(), key=lambda x: "" if x[0] == "id" else x[0]) + attributes_without_filter = {k: v for k, v in attributes.items() if k != "filters"} + sorted_attributes = sorted(attributes_without_filter.items(), key=lambda x: "" if x[0] == "id" else x[0]) for attr, col_def in sorted_attributes: self._convert_col_def_to_attribute( @@ -199,11 +205,23 @@ def _convert_table_to_component( component=component, attr=attr, col_def=col_def, + 
table_mask=table_mask, extra_info=extra_info, ) return pgm_data + def _parse_table_filters(self, data: TabularData, table: str, filtering_functions: Any) -> Optional[np.ndarray]: + if not isinstance(data[table], pd.DataFrame): + return None + + table_mask = np.ones(len(data[table]), dtype=bool) + for filtering_fn in filtering_functions: + for fn_name, kwargs in filtering_fn.items(): + fn_ptr = get_function(fn_name) + table_mask &= data[table].apply(fn_ptr, axis=1, **kwargs).values + return table_mask + # pylint: disable = too-many-arguments def _convert_col_def_to_attribute( self, @@ -213,6 +231,7 @@ def _convert_col_def_to_attribute( component: str, attr: str, col_def: Any, + table_mask: Optional[np.ndarray], extra_info: Optional[ExtraInfo], ): """This function updates one of the attributes of pgm_data, based on the corresponding table/column in a tabular @@ -242,7 +261,7 @@ def _convert_col_def_to_attribute( """ # To avoid mistakes, the attributes in the mapping should exist. There is one extra attribute called # 'extra' in which extra information can be captured. - if attr not in pgm_data.dtype.names and attr != "extra": + if attr not in pgm_data.dtype.names and attr not in ["extra", "filters"]: attrs = ", ".join(pgm_data.dtype.names) raise KeyError(f"Could not find attribute '{attr}' for '{component}s'. (choose from: {attrs})") @@ -250,12 +269,19 @@ def _convert_col_def_to_attribute( # Extra info must be linked to the object IDs, therefore the uuids should be known before extra info can # be parsed. Before this for loop, it is checked that "id" exists and it is placed at the front. 
self._handle_extra_info( - data=data, table=table, col_def=col_def, uuids=pgm_data["id"], extra_info=extra_info + data=data, + table=table, + col_def=col_def, + uuids=pgm_data["id"], + table_mask=table_mask, + extra_info=extra_info, ) # Extra info should not be added to the numpy arrays, so let's continue to the next attribute return - attr_data = self._parse_col_def(data=data, table=table, col_def=col_def, extra_info=extra_info) + attr_data = self._parse_col_def( + data=data, table=table, table_mask=table_mask, col_def=col_def, extra_info=extra_info + ) if len(attr_data.columns) != 1: raise ValueError(f"DataFrame for {component}.{attr} should contain a single column ({attr_data.columns})") @@ -268,6 +294,7 @@ def _handle_extra_info( table: str, col_def: Any, uuids: np.ndarray, + table_mask: Optional[np.ndarray], extra_info: Optional[ExtraInfo], ) -> None: """This function can extract extra info from the tabular data and store it in the extra_info dict @@ -292,7 +319,9 @@ def _handle_extra_info( if extra_info is None: return - extra = self._parse_col_def(data=data, table=table, col_def=col_def, extra_info=None).to_dict(orient="records") + extra = self._parse_col_def( + data=data, table=table, table_mask=table_mask, col_def=col_def, extra_info=None + ).to_dict(orient="records") for i, xtr in zip(uuids, extra): xtr = { k[0] if isinstance(k, tuple) else k: v @@ -339,7 +368,12 @@ def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfo]) -> Tab return TabularData(logger=self._log, **data) def _parse_col_def( - self, data: TabularData, table: str, col_def: Any, extra_info: Optional[ExtraInfo] + self, + data: TabularData, + table: str, + col_def: Any, + table_mask: Optional[np.ndarray], + extra_info: Optional[ExtraInfo], ) -> pd.DataFrame: """Interpret the column definition and extract/convert/create the data as a pandas DataFrame. 
@@ -353,17 +387,21 @@ def _parse_col_def( """ if isinstance(col_def, (int, float)): - return self._parse_col_def_const(data=data, table=table, col_def=col_def) + return self._parse_col_def_const(data=data, table=table, col_def=col_def, table_mask=table_mask) if isinstance(col_def, str): - return self._parse_col_def_column_name(data=data, table=table, col_def=col_def) + return self._parse_col_def_column_name(data=data, table=table, col_def=col_def, table_mask=table_mask) if isinstance(col_def, dict): - return self._parse_col_def_filter(data=data, table=table, col_def=col_def, extra_info=extra_info) + return self._parse_col_def_filter( + data=data, table=table, table_mask=table_mask, col_def=col_def, extra_info=extra_info + ) if isinstance(col_def, list): - return self._parse_col_def_composite(data=data, table=table, col_def=col_def) + return self._parse_col_def_composite(data=data, table=table, col_def=col_def, table_mask=table_mask) raise TypeError(f"Invalid column definition: {col_def}") @staticmethod - def _parse_col_def_const(data: TabularData, table: str, col_def: Union[int, float]) -> pd.DataFrame: + def _parse_col_def_const( + data: TabularData, table: str, col_def: Union[int, float], table_mask: Optional[np.ndarray] = None + ) -> pd.DataFrame: """Create a single column pandas DataFrame containing the const value. 
Args: @@ -376,9 +414,15 @@ def _parse_col_def_const(data: TabularData, table: str, col_def: Union[int, floa """ assert isinstance(col_def, (int, float)) - return pd.DataFrame([col_def] * len(data[table])) - - def _parse_col_def_column_name(self, data: TabularData, table: str, col_def: str) -> pd.DataFrame: + const_df = pd.DataFrame([col_def] * len(data[table])) + if table_mask is not None: + # Required to retain indices before filter + return const_df[table_mask] + return const_df + + def _parse_col_def_column_name( + self, data: TabularData, table: str, col_def: str, table_mask: Optional[np.ndarray] = None + ) -> pd.DataFrame: """Extract a column from the data. If the column doesn't exist, check if the col_def is a special float value, like 'inf'. If that's the case, create a single column pandas DataFrame containing the const value. @@ -391,13 +435,18 @@ def _parse_col_def_column_name(self, data: TabularData, table: str, col_def: str """ assert isinstance(col_def, str) + table_data = data[table] + if table_mask is not None: + table_data = table_data[table_mask] # If multiple columns are given in col_def, return the first column that exists in the dataset columns = [col_name.strip() for col_name in col_def.split("|")] for col_name in columns: if col_name in table_data or col_name == "index": col_data = data.get_column(table_name=table, column_name=col_name) + if table_mask is not None: + col_data = col_data[table_mask] col_data = self._apply_multiplier(table=table, column=col_name, data=col_data) return pd.DataFrame(col_data) @@ -408,7 +457,7 @@ def _parse_col_def_column_name(self, data: TabularData, table: str, col_def: str columns_str = " and ".join(f"'{col_name}'" for col_name in columns) raise KeyError(f"Could not find column {columns_str} on table '{table}'") - return self._parse_col_def_const(data=data, table=table, col_def=const_value) + return self._parse_col_def_const(data=data, table=table, col_def=const_value, table_mask=table_mask) def 
_apply_multiplier(self, table: str, column: str, data: pd.Series) -> pd.Series: if self._multipliers is None: @@ -421,7 +470,14 @@ def _apply_multiplier(self, table: str, column: str, data: pd.Series) -> pd.Seri return data def _parse_reference( - self, data: TabularData, table: str, other_table: str, query_column: str, key_column: str, value_column: str + self, + data: TabularData, + table: str, + other_table: str, + query_column: str, + key_column: str, + value_column: str, + table_mask: Optional[np.ndarray], ) -> pd.DataFrame: """ Find and extract a column from a different table. @@ -437,15 +493,20 @@ def _parse_reference( Returns: """ - queries = self._parse_col_def_column_name(data=data, table=table, col_def=query_column) - keys = self._parse_col_def_column_name(data=data, table=other_table, col_def=key_column) - values = self._parse_col_def_column_name(data=data, table=other_table, col_def=value_column) + queries = self._parse_col_def_column_name(data=data, table=table, col_def=query_column, table_mask=table_mask) + keys = self._parse_col_def_column_name(data=data, table=other_table, col_def=key_column, table_mask=None) + values = self._parse_col_def_column_name(data=data, table=other_table, col_def=value_column, table_mask=None) other = pd.concat([keys, values], axis=1) result = queries.merge(other, how="left", left_on=query_column, right_on=key_column) return result[[value_column]] def _parse_col_def_filter( - self, data: TabularData, table: str, col_def: Dict[str, Any], extra_info: Optional[ExtraInfo] + self, + data: TabularData, + table: str, + col_def: Dict[str, Any], + table_mask: Optional[np.ndarray], + extra_info: Optional[ExtraInfo], ) -> pd.DataFrame: """ Parse column filters like 'auto_id', 'reference', 'function', etc @@ -464,6 +525,7 @@ def _parse_col_def_filter( col_data = self._parse_auto_id( data=data, table=table, + table_mask=table_mask, ref_table=sub_def.get("table"), ref_name=sub_def.get("name"), key_col_def=sub_def["key"], @@ -481,15 
+543,20 @@ def _parse_col_def_filter( return self._parse_reference( data=data, table=table, + table_mask=table_mask, other_table=sub_def["other_table"], query_column=sub_def["query_column"], key_column=sub_def["key_column"], value_column=sub_def["value_column"], ) elif isinstance(sub_def, list): - col_data = self._parse_pandas_function(data=data, table=table, fn_name=name, col_def=sub_def) + col_data = self._parse_pandas_function( + data=data, table=table, table_mask=table_mask, fn_name=name, col_def=sub_def + ) elif isinstance(sub_def, dict): - col_data = self._parse_function(data=data, table=table, function=name, col_def=sub_def) + col_data = self._parse_function( + data=data, table=table, table_mask=table_mask, function=name, col_def=sub_def + ) else: raise TypeError(f"Invalid {name} definition: {sub_def}") data_frames.append(col_data) @@ -502,6 +569,7 @@ def _parse_auto_id( ref_table: Optional[str], ref_name: Optional[str], key_col_def: Union[str, List[str], Dict[str, str]], + table_mask: Optional[np.ndarray], extra_info: Optional[ExtraInfo], ) -> pd.DataFrame: """ @@ -535,7 +603,9 @@ def _parse_auto_id( else: raise TypeError(f"Invalid key definition type '{type(key_col_def).__name__}': {key_col_def}") - col_data = self._parse_col_def(data=data, table=table, col_def=key_col_def, extra_info=None) + col_data = self._parse_col_def( + data=data, table=table, table_mask=table_mask, col_def=key_col_def, extra_info=None + ) def auto_id(row: np.ndarray): key = dict(zip(key_names, row)) @@ -558,7 +628,9 @@ def auto_id(row: np.ndarray): return col_data.apply(auto_id, axis=1, raw=True) - def _parse_pandas_function(self, data: TabularData, table: str, fn_name: str, col_def: List[Any]) -> pd.DataFrame: + def _parse_pandas_function( + self, data: TabularData, table: str, fn_name: str, col_def: List[Any], table_mask: Optional[np.ndarray] + ) -> pd.DataFrame: """Special vectorized functions. 
Args: @@ -576,7 +648,7 @@ def _parse_pandas_function(self, data: TabularData, table: str, fn_name: str, co if fn_name == "multiply": fn_name = "prod" - col_data = self._parse_col_def(data=data, table=table, col_def=col_def, extra_info=None) + col_data = self._parse_col_def(data=data, table=table, col_def=col_def, table_mask=table_mask, extra_info=None) try: fn_ptr = getattr(col_data, fn_name) @@ -599,7 +671,9 @@ def _parse_pandas_function(self, data: TabularData, table: str, fn_name: str, co return pd.DataFrame(fn_ptr(axis=1)) - def _parse_function(self, data: TabularData, table: str, function: str, col_def: Dict[str, Any]) -> pd.DataFrame: + def _parse_function( + self, data: TabularData, table: str, function: str, col_def: Dict[str, Any], table_mask: Optional[np.ndarray] + ) -> pd.DataFrame: """Import the function by name and apply it to each row. Args: @@ -616,7 +690,7 @@ def _parse_function(self, data: TabularData, table: str, function: str, col_def: fn_ptr = get_function(function) key_words = list(col_def.keys()) sub_def = list(col_def.values()) - col_data = self._parse_col_def(data=data, table=table, col_def=sub_def, extra_info=None) + col_data = self._parse_col_def(data=data, table=table, col_def=sub_def, table_mask=table_mask, extra_info=None) if col_data.empty: raise ValueError(f"Cannot apply function {function} to an empty DataFrame") @@ -624,7 +698,9 @@ def _parse_function(self, data: TabularData, table: str, function: str, col_def: col_data = col_data.apply(lambda row, fn=fn_ptr: fn(**dict(zip(key_words, row))), axis=1, raw=True) return pd.DataFrame(col_data) - def _parse_col_def_composite(self, data: TabularData, table: str, col_def: list) -> pd.DataFrame: + def _parse_col_def_composite( + self, data: TabularData, table: str, col_def: list, table_mask: Optional[np.ndarray] + ) -> pd.DataFrame: """Select multiple columns (each is created from a column definition) and return them as a new DataFrame. 
Args: @@ -636,7 +712,10 @@ def _parse_col_def_composite(self, data: TabularData, table: str, col_def: list) """ assert isinstance(col_def, list) - columns = [self._parse_col_def(data=data, table=table, col_def=sub_def, extra_info=None) for sub_def in col_def] + columns = [ + self._parse_col_def(data=data, table=table, col_def=sub_def, table_mask=table_mask, extra_info=None) + for sub_def in col_def + ] return pd.concat(columns, axis=1) def _get_id(self, table: str, key: Mapping[str, int], name: Optional[str]) -> int: diff --git a/src/power_grid_model_io/functions/filters.py b/src/power_grid_model_io/functions/filters.py new file mode 100644 index 00000000..86ebe073 --- /dev/null +++ b/src/power_grid_model_io/functions/filters.py @@ -0,0 +1,40 @@ +# SPDX-FileCopyrightText: Contributors to the Power Grid Model project +# +# SPDX-License-Identifier: MPL-2.0 +""" +These functions can be used in the mapping files to apply filter functions to vision data +""" + +from typing import List, Union + +import pandas as pd + +from power_grid_model_io.functions import has_value + + +def exclude_empty(row: pd.Series, col: str) -> bool: + """ + filter out empty + """ + result = has_value(row[col]) + if isinstance(result, pd.Series): + return result.item() + return result + + +def exclude_value(row: pd.Series, col: str, value: Union[float, str]) -> bool: + """ + filter out by match value + """ + result = row[col] != value + if isinstance(result, pd.Series): + return result.item() + return result + + +def exclude_all_columns_empty_or_zero(row: pd.Series, cols: List[str]) -> bool: + """ + filter out empty or zero values in multiple columns. 
+ This is same as not all(not exclude_value or not exclude_empty) + """ + return any(exclude_value(row, col, 0) and exclude_empty(row, col) for col in cols) diff --git a/tests/data/vision/pgm_input_data_en.json b/tests/data/vision/pgm_input_data_en.json index d0d7f11c..7b5b0618 100644 --- a/tests/data/vision/pgm_input_data_en.json +++ b/tests/data/vision/pgm_input_data_en.json @@ -6,49 +6,52 @@ {"id": 2, "id_reference": {"table": "Nodes", "key": {"Number": 3}}, "Name": "node3"}, {"id": 3, "id_reference": {"table": "Nodes", "key": {"Number": 4}}, "Name": "node4"}, {"id": 4, "id_reference": {"table": "Nodes", "key": {"Number": 5}}}, - {"id": 12, "id_reference": {"table": "Transformer loads", "name": "internal_node", "key": {"Node.Number": 3, "Subnumber": 4}}, "Name": "transformer_load_1"} + {"id": 13, "id_reference": {"table": "Transformer loads", "name": "internal_node", "key": {"Node.Number": 3, "Subnumber": 4}}, "Name": "transformer_load_2"}, + {"id": 14, "id_reference": {"table": "Transformer loads", "name": "internal_node", "key": {"Node.Number": 3, "Subnumber": 2}}, "Name": "transformer_load_1"} ], "line": [ - {"id": 5, "from_node": 0, "to_node": 1, "id_reference": {"table": "Cables", "key": {"Number": 1}}, "Name": "cable1"}, - {"id": 6, "from_node": 0, "to_node": 1, "id_reference": {"table": "Lines", "key": {"Number": 1}}, "Name": "line1"}, - {"id": 8, "from_node": 0, "to_node": 1, "id_reference": {"table": "Reactance coils", "key": {"Number": 1}}, "Name": "rcoil1"} + {"id": 5, "from_node": 0, "to_node": 1, "id_reference": {"table": "Cables", "key": {"Number": 1}}, "Name": "cable1"}, + {"id": 6, "from_node": 0, "to_node": 1, "id_reference": {"table": "Lines", "key": {"Number": 1}}, "Name": "line1"}, + {"id": 8, "from_node": 0, "to_node": 1, "id_reference": {"table": "Reactance coils", "key": {"Number": 1}}, "Name": "rcoil1"} ], "link": [ - {"id": 7, "from_node": 0, "to_node": 1, "id_reference": {"table": "Links", "key": {"Number": 1}}, "Name": "link1"} + 
{"id": 7, "from_node": 0, "to_node": 1, "id_reference": {"table": "Links", "key": {"Number": 1}}, "Name": "link1"} ], "transformer": [ - {"id": 9, "from_node": 1, "to_node": 2, "id_reference": {"table": "Transformers", "key": {"Number": 1}}, "Name": "transformer1"}, - {"id": 10, "from_node": 1, "to_node": 3, "id_reference": {"table": "Special transformers", "key": {"Number": 1}}, "Name": "special_trans1"}, - {"id": 11, "from_node": 2, "to_node": 12, "id_reference": {"table": "Transformer loads", "name": "transformer", "key": {"Node.Number": 3, "Subnumber": 4}}, "Name": "transformer_load_1"} + {"id": 9, "from_node": 1, "to_node": 2, "id_reference": {"table": "Transformers", "key": {"Number": 1}}, "Name": "transformer1"}, + {"id": 10, "from_node": 1, "to_node": 3, "id_reference": {"table": "Special transformers", "key": {"Number": 1}}, "Name": "special_trans1"}, + {"id": 11, "from_node": 2, "to_node": 13, "id_reference": {"table": "Transformer loads", "name": "transformer", "key": {"Node.Number": 3, "Subnumber": 4}}, "Name": "transformer_load_2"}, + {"id": 12, "from_node": 2, "to_node": 14, "id_reference": {"table": "Transformer loads", "name": "transformer", "key": {"Node.Number": 3, "Subnumber": 2}}, "Name": "transformer_load_1"} ], "sym_load": [ - {"id": 13, "node": 12, "id_reference": {"table": "Transformer loads", "name": "load", "key": {"Node.Number": 3, "Subnumber": 4}}, "Name": "transformer_load_1"}, - {"id": 19, "node": 2, "id_reference": {"table": "Loads", "key": {"Node.Number": 3, "Subnumber": 6}}, "Name": "load1"} + {"id": 15, "node": 13, "id_reference": {"table": "Transformer loads", "name": "load", "key": {"Node.Number": 3, "Subnumber": 4}}, "Name": "transformer_load_2"}, + {"id": 16, "node": 14, "id_reference": {"table": "Transformer loads", "name": "load", "key": {"Node.Number": 3, "Subnumber": 2}}, "Name": "transformer_load_1"}, + {"id": 22, "node": 2, "id_reference": {"table": "Loads", "key": {"Node.Number": 3, "Subnumber": 6}}, "Name": "load1"} ], 
"sym_gen": [ - {"id": 14, "node": 12, "id_reference": {"table": "Transformer loads", "name": "generation", "key": {"Node.Number": 3, "Subnumber": 4}}, "Name": "transformer_load_1"}, - {"id": 15, "node": 12, "id_reference": {"table": "Transformer loads", "name": "pv_generation", "key": {"Node.Number": 3, "Subnumber": 4}}, "Name": "transformer_load_1"}, - {"id": 17, "node": 3, "id_reference": {"table": "Synchronous generators", "key": {"Node.Number": 4, "Subnumber": 2}}, "Name": "syngen1"}, - {"id": 18, "node": 2, "id_reference": {"table": "Wind turbines", "key": {"Node.Number": 3, "Subnumber": 3}}, "Name": "wind1"}, - {"id": 23, "node": 2, "id_reference": {"table": "Pvs", "key": {"Node.Number": 3, "Subnumber": 1}}, "Name": "pv1"} - ], - "three_winding_transformer": - [ - {"id": 24, "node_1": 1, "node_2": 2, "node_3": 4, "status_1": 1, "status_2": 1, "status_3": 1, "u1": 11000.0, "u2": 400.0, "u3": 400.0 , "sn_1": 30000000.0, "sn_2": 40000000.0, "sn_3": 35000000.0, "uk_12": 0.01, "uk_13": 0.02, "uk_23": 0.03, "pk_12": 10000.0, "pk_13": 20000.0, "pk_23": 30000.0, "i0": 0.00171428, "p0": 60000.0, "winding_1": 2, "winding_2": 1, "winding_3": 1, "clock_12": 5, "clock_13": 7, "tap_pos": 2, "tap_side": 2, "tap_min": -7, "tap_max": 8, "tap_nom": 2, "tap_size": 10.0, "id_reference": {"table": "Three winding transformers", "key": {"Number": 1}}} + {"id": 17, "node": 14, "id_reference": {"table": "Transformer loads", "name": "generation", "key": {"Node.Number": 3, "Subnumber": 2}}, "Name": "transformer_load_1"}, + {"id": 18, "node": 14, "id_reference": {"table": "Transformer loads", "name": "pv_generation", "key": {"Node.Number": 3, "Subnumber": 2}}, "Name": "transformer_load_1"}, + {"id": 20, "node": 3, "id_reference": {"table": "Synchronous generators", "key": {"Node.Number": 4, "Subnumber": 2}}, "Name": "syngen1"}, + {"id": 21, "node": 2, "id_reference": {"table": "Wind turbines", "key": {"Node.Number": 3, "Subnumber": 3}}, "Name": "wind1"}, + {"id": 26, "node": 2, 
"id_reference": {"table": "Pvs", "key": {"Node.Number": 3, "Subnumber": 1}}, "Name": "pv1"} ], "source": [ - {"id": 16, "node": 0, "id_reference": {"table": "Sources", "key": {"Node.Number": 1, "Subnumber": 1}}, "Name": "source1"} + {"id": 19, "node": 0, "id_reference": {"table": "Sources", "key": {"Node.Number": 1, "Subnumber": 1}}, "Name": "source1"} ], "shunt": [ - {"id": 20, "node": 2, "id_reference": {"table": "Zigzag transformers", "key": {"Node.Number": 3, "Subnumber": 5}}, "Name": "zztrans1"}, - {"id": 21, "node": 3, "id_reference": {"table": "Capacitors", "key": {"Node.Number": 4, "Subnumber": 9}}, "Name": "shunt1"}, - {"id": 22, "node": 3, "id_reference": {"table": "Reactors", "key": {"Node.Number": 4, "Subnumber": 1}}} + {"id": 23, "node": 2, "id_reference": {"table": "Zigzag transformers", "key": {"Node.Number": 3, "Subnumber": 5}}, "Name": "zztrans1"}, + {"id": 24, "node": 3, "id_reference": {"table": "Capacitors", "key": {"Node.Number": 4, "Subnumber": 9}}, "Name": "shunt1"}, + {"id": 25, "node": 3, "id_reference": {"table": "Reactors", "key": {"Node.Number": 4, "Subnumber": 1}}} + ], + "three_winding_transformer": + [ + {"id": 27, "node_1": 1, "node_2": 2, "node_3": 4, "id_reference": {"table": "Three winding transformers", "key": {"Number": 1}}} ] } diff --git a/tests/data/vision/pgm_input_data_nl.json b/tests/data/vision/pgm_input_data_nl.json index bf030d45..e87c5fad 100644 --- a/tests/data/vision/pgm_input_data_nl.json +++ b/tests/data/vision/pgm_input_data_nl.json @@ -6,49 +6,52 @@ {"id": 2, "id_reference": {"table": "Knooppunten", "key": {"Nummer": 3}}, "Naam": "node3"}, {"id": 3, "id_reference": {"table": "Knooppunten", "key": {"Nummer": 4}}, "Naam": "node4"}, {"id": 4, "id_reference": {"table": "Knooppunten", "key": {"Nummer": 5}}}, - {"id": 12, "id_reference": {"table": "Transformatorbelastingen", "name": "internal_node", "key": {"Knooppunt.Nummer": 3, "Subnummer": 4}}, "Naam": "transformer_load_1"} + {"id": 13, "id_reference": {"table": 
"Transformatorbelastingen", "name": "internal_node", "key": {"Knooppunt.Nummer": 3, "Subnummer": 4}}, "Naam": "transformer_load_2"}, + {"id": 14, "id_reference": {"table": "Transformatorbelastingen", "name": "internal_node", "key": {"Knooppunt.Nummer": 3, "Subnummer": 2}}, "Naam": "transformer_load_1"} ], "line": [ - {"id": 5, "from_node": 0, "to_node": 1, "id_reference": {"table": "Kabels", "key": {"Nummer": 1}}, "Naam": "cable1"}, - {"id": 6, "from_node": 0, "to_node": 1, "id_reference": {"table": "Verbindingen", "key": {"Nummer": 1}}, "Naam": "line1"}, - {"id": 8, "from_node": 0, "to_node": 1, "id_reference": {"table": "Smoorspoelen", "key": {"Nummer": 1}}, "Naam": "rcoil1"} + {"id": 5, "from_node": 0, "to_node": 1, "id_reference": {"table": "Kabels", "key": {"Nummer": 1}}, "Naam": "cable1"}, + {"id": 6, "from_node": 0, "to_node": 1, "id_reference": {"table": "Verbindingen", "key": {"Nummer": 1}}, "Naam": "line1"}, + {"id": 8, "from_node": 0, "to_node": 1, "id_reference": {"table": "Smoorspoelen", "key": {"Nummer": 1}}, "Naam": "rcoil1"} ], "link": [ - {"id": 7, "from_node": 0, "to_node": 1, "id_reference": {"table": "Links", "key": {"Nummer": 1}}, "Naam": "link1"} + {"id": 7, "from_node": 0, "to_node": 1, "id_reference": {"table": "Links", "key": {"Nummer": 1}}, "Naam": "link1"} ], "transformer": [ - {"id": 9, "from_node": 1, "to_node": 2, "id_reference": {"table": "Transformatoren", "key": {"Nummer": 1}}, "Naam": "transformer1"}, - {"id": 10, "from_node": 1, "to_node": 3, "id_reference": {"table": "Speciale transformatoren", "key": {"Nummer": 1}}, "Naam": "special_trans1"}, - {"id": 11, "from_node": 2, "to_node": 12, "id_reference": {"table": "Transformatorbelastingen", "name": "transformer", "key": {"Knooppunt.Nummer": 3, "Subnummer": 4}}, "Naam": "transformer_load_1"} + {"id": 9, "from_node": 1, "to_node": 2, "id_reference": {"table": "Transformatoren", "key": {"Nummer": 1}}, "Naam": "transformer1"}, + {"id": 10, "from_node": 1, "to_node": 3, "id_reference": 
{"table": "Speciale transformatoren", "key": {"Nummer": 1}}, "Naam": "special_trans1"}, + {"id": 11, "from_node": 2, "to_node": 13, "id_reference": {"table": "Transformatorbelastingen", "name": "transformer", "key": {"Knooppunt.Nummer": 3, "Subnummer": 4}}, "Naam": "transformer_load_2"}, + {"id": 12, "from_node": 2, "to_node": 14, "id_reference": {"table": "Transformatorbelastingen", "name": "transformer", "key": {"Knooppunt.Nummer": 3, "Subnummer": 2}}, "Naam": "transformer_load_1"} ], "sym_load": [ - {"id": 13, "node": 12, "id_reference": {"table": "Transformatorbelastingen", "name": "load", "key": {"Knooppunt.Nummer": 3, "Subnummer": 4}}, "Naam": "transformer_load_1"}, - {"id": 19, "node": 2, "id_reference": {"table": "Belastingen", "key": {"Knooppunt.Nummer": 3, "Subnummer": 6}}, "Naam": "load1"} + {"id": 15, "node": 13, "id_reference": {"table": "Transformatorbelastingen", "name": "load", "key": {"Knooppunt.Nummer": 3, "Subnummer": 4}}, "Naam": "transformer_load_2"}, + {"id": 16, "node": 14, "id_reference": {"table": "Transformatorbelastingen", "name": "load", "key": {"Knooppunt.Nummer": 3, "Subnummer": 2}}, "Naam": "transformer_load_1"}, + {"id": 22, "node": 2, "id_reference": {"table": "Belastingen", "key": {"Knooppunt.Nummer": 3, "Subnummer": 6}}, "Naam": "load1"} ], "sym_gen": [ - {"id": 14, "node": 12, "id_reference": {"table": "Transformatorbelastingen", "name": "generation", "key": {"Knooppunt.Nummer": 3, "Subnummer": 4}}, "Naam": "transformer_load_1"}, - {"id": 15, "node": 12, "id_reference": {"table": "Transformatorbelastingen", "name": "pv_generation", "key": {"Knooppunt.Nummer": 3, "Subnummer": 4}}, "Naam": "transformer_load_1"}, - {"id": 17, "node": 3, "id_reference": {"table": "Synchrone generatoren", "key": {"Knooppunt.Nummer": 4, "Subnummer": 2}}, "Naam": "syngen1"}, - {"id": 18, "node": 2, "id_reference": {"table": "Windturbines", "key": {"Knooppunt.Nummer": 3, "Subnummer": 3}}, "Naam": "wind1"}, - {"id": 23, "node": 2, "id_reference": 
{"table": "Pv's", "key": {"Knooppunt.Nummer": 3, "Subnummer": 1}}, "Naam": "pv1"} - ], - "three_winding_transformer": - [ - {"id": 24, "node_1": 1, "node_2": 2, "node_3": 4, "id_reference": {"table": "Driewikkelingstransformatoren", "key": {"Nummer": 1}}} + {"id": 17, "node": 14, "id_reference": {"table": "Transformatorbelastingen", "name": "generation", "key": {"Knooppunt.Nummer": 3, "Subnummer": 2}}, "Naam": "transformer_load_1"}, + {"id": 18, "node": 14, "id_reference": {"table": "Transformatorbelastingen", "name": "pv_generation", "key": {"Knooppunt.Nummer": 3, "Subnummer": 2}}, "Naam": "transformer_load_1"}, + {"id": 20, "node": 3, "id_reference": {"table": "Synchrone generatoren", "key": {"Knooppunt.Nummer": 4, "Subnummer": 2}}, "Naam": "syngen1"}, + {"id": 21, "node": 2, "id_reference": {"table": "Windturbines", "key": {"Knooppunt.Nummer": 3, "Subnummer": 3}}, "Naam": "wind1"}, + {"id": 26, "node": 2, "id_reference": {"table": "Pv's", "key": {"Knooppunt.Nummer": 3, "Subnummer": 1}}, "Naam": "pv1"} ], "source": [ - {"id": 16, "node": 0, "id_reference": {"table": "Netvoedingen", "key": {"Knooppunt.Nummer": 1, "Subnummer": 1}}, "Naam": "source1"} + {"id": 19, "node": 0, "id_reference": {"table": "Netvoedingen", "key": {"Knooppunt.Nummer": 1, "Subnummer": 1}}, "Naam": "source1"} ], "shunt": [ - {"id": 20, "node": 2, "id_reference": {"table": "Nulpuntstransformatoren", "key": {"Knooppunt.Nummer": 3, "Subnummer": 5}}, "Naam": "zztrans1"}, - {"id": 21, "node": 3, "id_reference": {"table": "Condensatoren", "key": {"Knooppunt.Nummer": 4, "Subnummer": 9}}, "Naam": "shunt1"}, - {"id": 22, "node": 3, "id_reference": {"table": "Spoelen", "key": {"Knooppunt.Nummer": 4, "Subnummer": 1}}} + {"id": 23, "node": 2, "id_reference": {"table": "Nulpuntstransformatoren", "key": {"Knooppunt.Nummer": 3, "Subnummer": 5}}, "Naam": "zztrans1"}, + {"id": 24, "node": 3, "id_reference": {"table": "Condensatoren", "key": {"Knooppunt.Nummer": 4, "Subnummer": 9}}, "Naam": "shunt1"}, + {"id": 
25, "node": 3, "id_reference": {"table": "Spoelen", "key": {"Knooppunt.Nummer": 4, "Subnummer": 1}}} + ], + "three_winding_transformer": + [ + {"id": 27, "node_1": 1, "node_2": 2, "node_3": 4, "id_reference": {"table": "Driewikkelingstransformatoren", "key": {"Nummer": 1}}} ] } diff --git a/tests/data/vision/vision_97_en.xlsx b/tests/data/vision/vision_97_en.xlsx index 2e7ac3c9..7944f1ed 100644 Binary files a/tests/data/vision/vision_97_en.xlsx and b/tests/data/vision/vision_97_en.xlsx differ diff --git a/tests/data/vision/vision_en.xlsx b/tests/data/vision/vision_en.xlsx index e3040197..bfd69888 100644 Binary files a/tests/data/vision/vision_en.xlsx and b/tests/data/vision/vision_en.xlsx differ diff --git a/tests/data/vision/vision_nl.xlsx b/tests/data/vision/vision_nl.xlsx index ed835ca6..3aa2b19e 100644 Binary files a/tests/data/vision/vision_nl.xlsx and b/tests/data/vision/vision_nl.xlsx differ diff --git a/tests/data/vision/vision_validation.vnf b/tests/data/vision/vision_validation.vnf index f38d7569..dd58a26d 100644 --- a/tests/data/vision/vision_validation.vnf +++ b/tests/data/vision/vision_validation.vnf @@ -1,131 +1,143 @@ -V9.4 +V9.7 NETWORK -[OPTIONS] -Currency=$ +[PROPERTIES] +#System Currency:'$' +#General +#Invisible +#History +#HistoryItems +#Users [] [SHEET] -#1 1 'Sheet 1' $00C0C0C0 $00000000 0 0 0 0 0 0 0 0 0 0 +#General GUID:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' Name:'Sheet 1' Color:$00C0C0C0 [] [NODE] -#1 1 44875.5806865509 0 0 0 'node1' '' '' 11 1 '' '' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -#2 0 0 0 0 0 -#5 0 0 0 0 0 0.5 0 1 1 1 0 0 0 0 0 -#9 1 14800 14800 1 $00000000 6 4 0 $00000000 10 'Arial' 0 0 0 0 0 -70 -20 20 5 5 0 50 -#1 2 44875.5937016435 0 0 0 'node2' '' '' 11 1 '' '' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -#2 0 0 0 0 0 -#5 0 0 0 0 0 0.5 0 1 1 1 0 0 0 0 0 -#9 1 14940 14800 1 $00000000 6 4 0 $00000000 10 'Arial' 0 0 0 0 0 -70 -20 20 5 5 0 50 -#1 3 44875.5965067593 0 0 0 'node3' '' '' 0.4 1 '' '' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
-#2 0 0 0 0 0 -#5 0 0 0 0 0 0.5 0 1 1 1 0 0 0 0 0 -#9 1 15100 14800 1 $00000000 14 4 0 $00000000 10 'Arial' 0 0 0 0 0 -150 -20 20 5 5 0 50 -#1 4 44875.5989385185 0 0 0 'node4' '' '' 11 1 '' '' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -#2 0 0 0 0 0 -#5 0 0 0 0 0 0.5 0 1 1 1 0 0 0 0 0 -#9 1 15100 15100 1 $00000000 8 4 0 $00000000 10 'Arial' 0 0 0 0 0 -90 -20 20 5 5 0 50 -#1 5 44886.4465440509 0 0 0 '' '' '' 0.4 1 '' '' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -#2 0 0 0 0 0 -#5 0 0 0 0 0 0.5 0 1 1 1 0 0 0 0 0 -#9 1 15040 14580 1 $00000000 4 4 0 $00000000 10 'Arial' 0 0 0 0 0 -50 -20 20 5 5 0 50 +#General GUID:'{7FF722ED-33B3-4761-84AC-A164310D3C86}' CreationTime:44875.5806865509 Name:'node1' Unom:11 +#Railtype +#Installation Kb:0.5 Kt:1 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:14800 Y:14800 Symbol:1 Size:6 Width:4 UpstringsY:-70 FaultStringsX:-20 FaultStringsY:20 NoteX:5 NoteY:5 IconY:50 +#General GUID:'{1ED177A7-1F5D-4D81-8DE7-AB3E58512E0B}' CreationTime:44875.5937016435 Name:'node2' Unom:11 +#Railtype +#Installation Kb:0.5 Kt:1 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:14940 Y:14800 Symbol:1 Size:6 Width:4 UpstringsY:-70 FaultStringsX:-20 FaultStringsY:20 NoteX:5 NoteY:5 IconY:50 +#General GUID:'{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}' CreationTime:44875.5965067593 Name:'node3' Unom:0.4 +#Railtype +#Installation Kb:0.5 Kt:1 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15100 Y:14800 Symbol:1 Size:14 Width:4 UpstringsY:-150 FaultStringsX:-20 FaultStringsY:20 NoteX:5 NoteY:5 IconY:50 +#General GUID:'{A79AFDE9-4096-4BEB-AB63-2B851D7FC6D1}' CreationTime:44875.5989385185 Name:'node4' Unom:11 +#Railtype +#Installation Kb:0.5 Kt:1 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15100 Y:15100 Symbol:1 Size:8 Width:4 UpstringsY:-90 FaultStringsX:-20 FaultStringsY:20 NoteX:5 NoteY:5 IconY:50 +#General GUID:'{7848DBC8-9685-452C-89AF-9AB308224689}' CreationTime:44886.4465440509 Unom:0.4 +#Railtype 
+#Installation Kb:0.5 Kt:1 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15040 Y:14580 Symbol:1 Size:4 Width:4 UpstringsY:-50 FaultStringsX:-20 FaultStringsY:20 NoteX:5 NoteY:5 IconY:50 [] [LINK] -#1 1 44875.5943923958 0 0 0 1 2 'link1' 1 1 '' '' 0 '' '' 0 0 0 0 0 1 0 -#9 1 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 -15 6 0 4 -20 20 0 5 0 0 # 14800 14860 ## 14940 14860 +#General GUID:'{A4F61576-CFE1-4256-AF66-06AA0B7F5527}' CreationTime:44875.5943923958 Node1:'{7FF722ED-33B3-4761-84AC-A164310D3C86}' Node2:'{1ED177A7-1F5D-4D81-8DE7-AB3E58512E0B}' Name:'link1' SwitchState1:1 SwitchState2:1 RailConnectivity:1 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' Strings1X:12 Strings1Y:6 Strings2X:-15 Strings2Y:6 MidStringsY:4 FaultStringsX:-20 FaultStringsY:20 NoteY:5 FirstCorners:'{(14800 14860) }' SecondCorners:'{(14940 14860) }' [] [LINE] -#1 1 44875.5945847222 44886 0 0 1 2 'line1' 1 1 '' '' 0 '' '' 0 0 0 0 0 0 -#2 0.002 0.008 0.5 0.002 0.008 0.5 1000 1000 1000 0 30 0 0 500 '' -#9 1 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 -15 6 0 4 -20 20 0 5 0 0 # 14800 14780 ## 14940 14780 +#General GUID:'{B9558FB9-D06D-4B4D-A43F-0504597BD575}' CreationTime:44875.5945847222 MutationDate:44886 Node1:'{7FF722ED-33B3-4761-84AC-A164310D3C86}' Node2:'{1ED177A7-1F5D-4D81-8DE7-AB3E58512E0B}' Name:'line1' SwitchState1:1 SwitchState2:1 +#LinePart R:0.002 X:0.008 C:0.5 R0:0.002 X0:0.008 C0:0.5 Inom1:1000 Inom2:1000 Inom3:1000 TR:30 Length:500 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' Strings1X:12 Strings1Y:6 Strings2X:-15 Strings2Y:6 MidStringsY:4 FaultStringsX:-20 FaultStringsY:20 NoteY:5 FirstCorners:'{(14800 14780) }' SecondCorners:'{(14940 14780) }' [] [CABLE] -#1 1 44875.5938880671 0 0 0 1 2 'cable1' 1 1 '' '' 0 '' '' 0 0 0 0 0 0 P 1 0 -#2 123 1 '10.5kV 3x240mm² Al XLPE' 2 2 '' -#3 '240 Al X' 10 0 0.162 0.089 0.45 0.0004 0.68 0.339 0.45 455 0.5 399 0.75 360 1 342 22.8 90 90 250 50 150 -#9 1 $00000000 1 1 0 $00000000 10 
'Arial' 0 0 0 12 6 -15 6 0 4 -20 20 0 5 0 0 # 14800 14740 ## 14940 14740 +#General GUID:'{2FF748A9-78F3-4145-9E08-2CAEA5658F82}' CreationTime:44875.5938880671 Node1:'{7FF722ED-33B3-4761-84AC-A164310D3C86}' Node2:'{1ED177A7-1F5D-4D81-8DE7-AB3E58512E0B}' Name:'cable1' SwitchState1:1 SwitchState2:1 DynModel:'P' DynSection:1 +#CablePart Length:123 ParallelCableCount:1 CableType:'10.5kV 3x240mm² Al XLPE' GroundResistivityIndex:2 AmpacityFactor:2 +#CableType ShortName:'240 Al X' Unom:10 R:0.162 X:0.089 C:0.45 TanDelta:0.0004 R0:0.68 X0:0.339 C0:0.45 Inom0:455 G1:0.5 Inom1:399 G2:0.75 Inom2:360 G3:1 Inom3:342 Ik1s:22.8 TR:90 TInom:90 TIk1s:250 Frequency:50 PulseVelocity:150 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' Strings1X:12 Strings1Y:6 Strings2X:-15 Strings2Y:6 MidStringsY:4 FaultStringsX:-20 FaultStringsY:20 NoteY:5 FirstCorners:'{(14800 14740) }' SecondCorners:'{(14940 14740) }' [] [TRANSFORMER] -#1 1 44875.5968281481 0 0 0 2 3 'transformer1' 1 1 '' '' 0 '' '' 0 0 0 0 0 '10750/420 V 630 kVA' 0 0 0 0 0 0 0 1 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 1 -100 0 100 0 0 -#2 '630 KVA' 0.63 10.75 0.42 4 5.2 0.745 0 0.0019 0.011 21.6 D YN 5 1 0.25 5 3 1 0 0 -#3 0 1.04 1000 0 0 0 0 -#9 1 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 -15 6 0 24 -20 20 0 5 0 0 # 14940 14780 ## 15100 14780 +#General GUID:'{D783FB03-DAF4-4471-B1AC-B74451717DF4}' CreationTime:44875.5968281481 MutationDate:45463 Node1:'{1ED177A7-1F5D-4D81-8DE7-AB3E58512E0B}' Node2:'{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}' Name:'transformer1' SwitchState1:1 SwitchState2:1 TransformerType:'10750/420 V 630 kVA' Earthing1:0 Earthing2:1 TapPosition:3 +#VoltageControl OwnControl:True ControlStatus:1 MeasureSide:2 SetPoint:0.4 DeadBand:0.1 Rc:0.008 CompoundingAtGeneration:True Pmin1:-100 Pmax1:100 +#TransformerType ShortName:'630 KVA' Snom:0.63 Unom1:10.75 Unom2:0.42 Uk:4 Pk:5.2 Po:0.745 R0:0.0019 Z0:0.011 Ik2s:21.6 WindingConnection1:'D' WindingConnection2:'YN' ClockNumber:5 TapSide:1 TapSize:0.25 TapMin:5 
TapNom:3 TapMax:1 +#Dynamics KneeFluxLeg1:1.04 KneeFluxLeg2:1.04 KneeFluxLeg3:1.04 MagnetizingInductanceRatioLeg1:1000 MagnetizingInductanceRatioLeg2:1000 MagnetizingInductanceRatioLeg3:1000 RemanentFluxLeg1:0.7 RemanentFluxLeg2:0.7 RemanentFluxLeg3:0.7 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' Strings1X:12 Strings1Y:6 Strings2X:-15 Strings2Y:6 MidStringsY:24 FaultStringsX:-20 FaultStringsY:20 NoteY:5 FirstCorners:'{(14940 14780) }' SecondCorners:'{(15100 14780) }' [] [SPECIAL TRANSFORMER] -#1 1 44875.5992225231 44877 44877 0 2 4 'special_trans1' 1 1 '' '' 0 '' '' 0 0 0 0 0 '10600/10600 V/23 MVA Smit' 0 0 0 0 0 0 5 0 0 0 0 0 0 0 0 0 0 1 -100 0 100 0 0 0 0 0 -#2 11 '23 MVA' 23 10.6 10.6 0.45 0.5 0.6 0 0 0 0 0 0 0.019 19 1 0.15 1 7 13 -#9 1 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 -15 6 24 0 -20 20 5 0 0 0 # 14940 14840 15000 14840 ## 15100 15020 15000 15020 +#General GUID:'{B46288EF-B591-45B1-8BAF-818BF23CB626}' CreationTime:44875.5992225231 MutationDate:45463 RevisionDate:44877 Node1:'{1ED177A7-1F5D-4D81-8DE7-AB3E58512E0B}' Node2:'{A79AFDE9-4096-4BEB-AB63-2B851D7FC6D1}' Name:'special_trans1' SwitchState1:1 SwitchState2:1 SpecialTransformerType:'10600/10600 V/23 MVA Smit' Earthing:0 TapPosition:5 TapPosition_b:0 TapPosition_c:0 +#VoltageControl Present:True Status:True MeasureSide:2 Setpoint:10 Deadband:0.1 Rc:0.008 CompoundingAtGeneration:True Pmin1:-100 Pmax1:100 +#PControl Pmin:0 Pmax:0 +#SpecialTransformerType Sort:11 ShortName:'23 MVA' Snom:23 Unom1:10.6 Unom2:10.6 Ukmin:0.45 Uknom:0.5 Ukmax:0.6 Z0:0.019 Ik2s:19 TapSide:1 TapSize:0.15 TapMin:1 TapNom:7 TapMax:13 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' Strings1X:12 Strings1Y:6 Strings2X:-15 Strings2Y:6 MidStringsX:24 FaultStringsX:-20 FaultStringsY:20 NoteX:5 FirstCorners:'{(14940 14840) (15000 14840) }' SecondCorners:'{(15100 15020) (15000 15020) }' [] -[COIL] -#1 1 44875.5954900694 0 0 0 1 2 'rcoil1' 1 1 '' '' 0 '' '' 0 0 0 0 0 '0.3 ohm 300 A' -#2 '0.3 Ohm' 10 
300 0 0.3 0 0.3 0 0.3 14.4 -#9 1 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 -15 6 0 24 -20 20 0 5 0 0 # 14800 14820 ## 14940 14820 +[REACTANCECOIL] +#General GUID:'{7EB807C6-F2ED-42D0-83C4-478CDCCE17F3}' CreationTime:44875.5954900694 Node1:'{7FF722ED-33B3-4761-84AC-A164310D3C86}' Node2:'{1ED177A7-1F5D-4D81-8DE7-AB3E58512E0B}' Name:'rcoil1' SwitchState1:1 SwitchState2:1 ReactanceCoilType:'0.3 ohm 300 A' +#ReactanceCoilType ShortName:'0.3 Ohm' Unom:10 Inom:300 X:0.3 X0:0.3 X2:0.3 Ik2s:14.4 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' Strings1X:12 Strings1Y:6 Strings2X:-15 Strings2Y:6 MidStringsY:24 FaultStringsX:-20 FaultStringsY:20 NoteY:5 FirstCorners:'{(14800 14820) }' SecondCorners:'{(14940 14820) }' [] [THREEWINDINGSTRANSFORMER] -#1 1 44886.4468122917 0 0 0 2 3 5 '' 1 1 1 '' '' '' 0 0 0 0 0 '' 0 0 0 0 0 0 0 0 1 2 3 1 1 5 1 2 0 0 0 0 0 0 0 0 -#2 30 40 35 11 0.4 0.4 1 2 3 10 20 30 20 30 40 60 3 0 0 0 0 0 0 0 0 0 D YN YN 5 7 1 0.1 -5 1 6 2 0.01 -7 2 8 -#9 1 15020 14680 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 -15 6 -15 6 20 20 -20 20 # 14940 14740 15020 14740 ## 15100 14740 15020 14740 ### 15040 14620 14940 14620 14940 14680 +#General GUID:'{CA65D7DA-A029-4D30-867C-91D85ACADF41}' CreationTime:44886.4468122917 MutationDate:45463 Node1:'{1ED177A7-1F5D-4D81-8DE7-AB3E58512E0B}' Node2:'{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}' Node3:'{7848DBC8-9685-452C-89AF-9AB308224689}' SwitchState1:1 SwitchState2:1 SwitchState3:1 Earthing1:0 Earthing2:1 Re2:2 Xe2:3 Earthing3:1 Re3:1 Xe3:5 TapControlled:1 TapFixed:2 +#VoltageControl Present:True Status:1 MeasuringSide:2 Setpoint:0.4 Deadband:0.1 Rc:0.008 +#ThreewindingsTransformerType Snom1:30 Snom2:40 Snom3:35 Unom1:11 Unom2:0.4 Unom3:0.4 Uk12:1 Uk13:2 Uk23:3 Pk12:10 Pk13:20 Pk23:30 SAt12:20 SAt13:30 SAt23:40 Po:60 Io:3 Connection1:'D' Connection2:'YN' Connection3:'YN' ClockNumber12:5 ClockNumber13:7 TapSideControlled:1 TapSizeControlled:0.1 TapminControlled:-5 TapnomControlled:1 TapmaxControlled:6 
TapSideFixed:2 TapSizeFixed:0.01 TapminFixed:-7 TapnomFixed:2 TapmaxFixed:8 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15020 Y:14680 Strings1X:12 Strings1Y:6 Strings2X:-15 Strings2Y:6 Strings3X:-15 Strings3Y:6 MidStringsX:20 MidStringsY:20 NoteX:-20 NoteY:20 FirstCorners:'{(14940 14740) (15020 14740) }' SecondCorners:'{(15100 14740) (15020 14740) }' ThirdCorners:'{(15040 14620) (14940 14620) (14940 14680) }' [] [SOURCE] -#1 1 1 44875.5933864468 0 0 0 'source1' 1 '' 0 0 0 0 0 0 1 0 1100 990 1100 0.1 3 0 0 0 0 0 -#9 1 14720 14820 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 -15 6 -125 0 5 5 0 +#General Node:'{7FF722ED-33B3-4761-84AC-A164310D3C86}' GUID:'{65633711-995E-4B8E-B2BE-B554AA90E013}' CreationTime:44875.5933864468 Name:'source1' SwitchState:1 Uref:1 Sk2nom:1100 Sk2min:990 Sk2max:1100 R/X:0.1 Z0/Z1:3 Profile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:14720 Y:14820 Strings1X:-15 Strings1Y:6 SymbolStringsX:-125 NoteX:5 NoteY:5 [] [SYNCHRONOUS GENERATOR] -#1 4 2 44875.605146088 0 0 0 'syngen1' 1 '' 0 0 0 0 0 0 0.5 0 0 C 0.16434205 0.95 0 1 1 1 0 0 0 0 0 0 0 0 '' -#2 11 1 0.95 0 0.1 0.014 0.2 0 0 0 1.3 1.6 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3000 0 0 0 -#9 1 15180 15040 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 125 0 -5 5 0 +#General Node:'{A79AFDE9-4096-4BEB-AB63-2B851D7FC6D1}' GUID:'{4B10F0F0-E728-4494-ABFF-613A2AA96524}' CreationTime:44875.605146088 Name:'syngen1' SwitchState:1 Pref:0.5 ControlSort:'C' Qref:0.16434205 CosRef:0.95 Uref:1 UQDroop:1 QlimitingType:1 Earthing:0 Profile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' +#SynchronousGeneratorType Unom:11 Snom:1 CosNom:0.95 Qmax:0.1 rg:0.014 Xd2sat:0.2 UfMax:1.3 Xdsat:1.6 Nnom:3000 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15180 Y:15040 Strings1X:12 Strings1Y:6 SymbolStringsX:125 NoteX:-5 NoteY:5 [] [LOAD] -#1 3 6 44875.6091168403 0 0 0 'load1' 1 '' 0 0 0 0 0 0 0.1 0.1 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 
'' 1 -#9 1 15220 14680 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 165 0 -5 5 0 +#General Node:'{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}' GUID:'{2E0B8D28-9881-482A-B2F6-A92D90D2FEEB}' CreationTime:44875.6091168403 Name:'load1' SwitchState:1 P:0.1 Q:0.1 Fp1:1 Fq1:1 Fp2:1 Fq2:1 Fp3:1 Fq3:1 LoadBehaviour:'{0993B8C8-033F-4345-AF80-FDB160F0427D}' LoadGrowth:'{EC874317-80BD-4714-AC1C-F611E2BDAF50}' Profile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' Earthing:0 HarmonicImpedance:True +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15220 Y:14680 Strings1X:12 Strings1Y:6 SymbolStringsX:165 NoteX:-5 NoteY:5 [] [TRANSFORMERLOAD] -#1 3 4 44875.6029419907 0 0 0 'transformer_load_1' 1 '' 0 0 0 0 0 0 0.2 0.02 -6 -2 0 0.03 0.01 0 0.01 0 0 0 0 '' 2 '' -#2 '' 0.5 0.4 0.2 8 40 50 YN YN 0 1 0.01 1 2 4 -#9 1 15180 14880 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 205 0 -5 5 0 +#General Node:'{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}' GUID:'{A7821D13-99F5-42A5-8D0E-2BF4B0F1FD23}' CreationTime:44875.6029419907 MutationDate:45457 RevisionDate:45457 Name:'transformer_load_2' SwitchState:1 LoadP:0.2 LoadQ:0.02 LoadBehaviour:'{30ACBB5E-4C4B-42C5-B5A6-958BE8B77CFC}' LoadGrowth:'{086EADBF-BA43-4A2B-B048-A37DF08CD17D}' Profile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' GenerationGrowth:'{EC874317-80BD-4714-AC1C-F611E2BDAF50}' GenerationProfile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' PVGrowth:'{EC874317-80BD-4714-AC1C-F611E2BDAF50}' PVProfile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' TapPosition:2 +#TransformerType Snom:0.5 Unom1:0.4 Unom2:0.2 Uk:8 Pk:40 Po:50 WindingConnection1:'YN' WindingConnection2:'YN' ClockNumber:0 TapSide:1 TapSize:0.01 TapMin:1 TapNom:2 TapMax:4 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15180 Y:14900 Strings1X:12 Strings1Y:6 SymbolStringsX:205 NoteX:-5 NoteY:5 +#General Node:'{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}' GUID:'{68A2AEA4-2263-4902-81CD-E06C114D727C}' CreationTime:45457.4723790857 Name:'transformer_load_1' SwitchState:1 
LoadP:0.2 LoadQ:0.02 LoadBehaviour:'{30ACBB5E-4C4B-42C5-B5A6-958BE8B77CFC}' LoadGrowth:'{086EADBF-BA43-4A2B-B048-A37DF08CD17D}' Profile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' GenerationP:0.03 GenerationQ:0.01 GenerationGrowth:'{EC874317-80BD-4714-AC1C-F611E2BDAF50}' GenerationProfile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' PVPnom:0.01 PVGrowth:'{EC874317-80BD-4714-AC1C-F611E2BDAF50}' PVProfile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' TapPosition:2 +#TransformerType Snom:0.5 Unom1:0.4 Unom2:0.2 Uk:8 Pk:40 Po:50 WindingConnection1:'YN' WindingConnection2:'YN' ClockNumber:0 TapSide:1 TapSize:0.01 TapMin:1 TapNom:2 TapMax:4 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15180 Y:14840 Strings1X:12 Strings1Y:6 SymbolStringsX:205 NoteX:-5 NoteY:5 [] [SHUNTCAPACITOR] -#1 4 9 44875.6093570833 0 0 0 'shunt1' 1 '' 0 0 0 0 0 0 0.5 11 0 0 0 0 0 0 0 0 0 0 0 -#3 0 0 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 100 -#9 1 15200 15100 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 125 0 -5 5 0 +#General Node:'{A79AFDE9-4096-4BEB-AB63-2B851D7FC6D1}' GUID:'{346C4567-5D4D-439A-BC74-BA77684040F9}' CreationTime:44875.6093570833 Name:'shunt1' SwitchState:1 Q:0.5 Unom:11 Profile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' Earthing:0 +#ActiveFilter h3:100 h5:100 h7:100 h9:100 h11:100 h13:100 h15:100 h17:100 h19:100 h21:100 h23:100 h25:100 h27:100 h29:100 h31:100 h33:100 h35:100 h37:100 h39:100 h41:100 h43:100 h45:100 h47:100 h49:100 h51:100 h53:100 h55:100 h57:100 h59:100 h61:100 h63:100 h65:100 h67:100 h69:100 h71:100 h73:100 h75:100 h77:100 h79:100 h81:100 h83:100 h85:100 h87:100 h89:100 h91:100 h93:100 h95:100 h97:100 h99:100 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15200 Y:15100 Strings1X:12 Strings1Y:6 SymbolStringsX:125 NoteX:-5 NoteY:5 [] [SHUNTCOIL] -#1 4 1 44886.4459253472 0 0 0 '' 1 '' 0 0 0 0 0 
0 0.5 11 0 0 0 0 0 0 0 0 -#9 1 15180 15160 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 125 0 -5 5 0 +#General Node:'{A79AFDE9-4096-4BEB-AB63-2B851D7FC6D1}' GUID:'{394FBE07-A7FA-4249-B7FF-EC42A1C55303}' CreationTime:44886.4459253472 SwitchState:1 Q:0.5 Unom:11 Profile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' Earthing:0 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15180 Y:15160 Strings1X:12 Strings1Y:6 SymbolStringsX:125 NoteX:-5 NoteY:5 [] -[ZIGZAGCOIL] -#1 3 5 44875.6047998032 0 0 0 'zztrans1' 1 '' 0 0 0 0 0 0 1 0 0 0 '7 Ohm' -#2 0 7 -#9 1 15180 14940 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 165 0 -5 5 0 +[EARTHINGTRANSFORMER] +#General Node:'{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}' GUID:'{1B60A9EB-9FB9-4C80-BDAC-717D8F24D428}' CreationTime:44875.6047998032 Name:'zztrans1' SwitchState:1 Earthing:1 EarthingTransformerType:'7 Ohm' +#EarthingTransformerType X0:7 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15180 Y:14940 Strings1X:12 Strings1Y:6 SymbolStringsX:165 NoteX:-5 NoteY:5 [] [WINDTURBINE] -#1 3 3 44875.6020721991 44877 44877 0 'wind1' 1 '' 0 0 0 0 0 0 0 12 0 0 30 'Async/75 kVA/0.4 kV' -#2 0.075 0.40000001 7 0.1 4 15 26 26 -#11 0 1 1 0.95 0.5 1 0 1 0 1.03 -0.5 1.05 -0.5 -#9 1 15240 14840 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 185 0 -5 5 0 +#General Node:'{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}' GUID:'{AC17891F-98ED-4539-AB95-3D22CDD27F4C}' CreationTime:44875.6020721991 MutationDate:44877 RevisionDate:44877 Name:'wind1' SwitchState:1 NumberOf:1 WindSpeedOrPref:0 WindSpeed:12 Profile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' AxisHeight:30 WindTurbineType:'Async/75 kVA/0.4 kV' +#WindTurbineType Pnom:0.075 Unom:0.40000001 Ik/Inom:7 R/X:0.1 WindSpeedCutIn:4 WindSpeedNom:15 WindSpeedCuttingOut:26 WindSpeedCuttedOut:26 +#QControl CosRef:1 NoPNoQ:True Input1:0.95 Output1:0.5 Input2:1 Output2:0 Input3:1 Output3:0 Input4:1.03 Output4:-0.5 Input5:1.05 Output5:-0.5 +#Presentation 
Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15240 Y:14800 Strings1X:12 Strings1Y:6 SymbolStringsX:185 NoteX:-5 NoteY:5 [] [PV] -#1 3 1 44875.6000511458 0 0 0 'pv1' 1 '' 0 0 0 0 0 0 1000 0 52 5 0.1 180 30 0.1 180 30 0.1 180 30 '' -#2 0.5 1 '0,1 pu: 93 %; 1 pu: 97 %' 0 -#11 0 1 1 0.95 0.5 1 0 1 0 1.03 -0.5 1.05 -0.5 -#4 0.05 0 0.1 93 0.2 95 0.3 96 1 97 -#9 1 15180 14740 $00000000 1 1 0 $00000000 10 'Arial' 0 0 0 12 6 185 0 -5 5 0 +#General Node:'{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}' GUID:'{186B67F9-F676-48CD-81A7-F5ECFDCB4EF2}' CreationTime:44875.6000511458 Name:'pv1' SwitchState:1 Scaling:1000 Profile:'{A4D813DF-1EE1-4153-806C-DC228D251A79}' Longitude:52 Latitude:5 Panel1Pnom:0.1 Panel1Orientation:180 Panel1Slope:30 Panel2Pnom:0.1 Panel2Orientation:180 Panel2Slope:30 Panel3Pnom:0.1 Panel3Orientation:180 Panel3Slope:30 +#Inverter Snom:0.5 Ik/Inom:1 EfficiencyType:'0,1 pu: 93 %; 1 pu: 97 %' +#QControl CosRef:1 NoPNoQ:True Input1:0.95 Output1:0.5 Input2:1 Output2:0 Input3:1 Output3:0 Input4:1.03 Output4:-0.5 Input5:1.05 Output5:-0.5 +#EfficiencyType Input1:0.05 Output1:0 Input2:0.1 Output2:93 Input3:0.2 Output3:95 Input4:0.3 Output4:96 Input5:1 Output5:97 +#Presentation Sheet:'{AC5FD754-220B-47EF-B98C-367CB49E8C75}' X:15180 Y:14740 Strings1X:12 Strings1Y:6 SymbolStringsX:185 NoteX:-5 NoteY:5 [] diff --git a/tests/unit/converters/test_tabular_converter.py b/tests/unit/converters/test_tabular_converter.py index 895ffb7b..b54950b9 100644 --- a/tests/unit/converters/test_tabular_converter.py +++ b/tests/unit/converters/test_tabular_converter.py @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: MPL-2.0 from pathlib import Path -from typing import Tuple +from typing import Callable, Tuple from unittest.mock import MagicMock, call, patch import numpy as np @@ -154,12 +154,31 @@ def test_convert_table_to_component(converter: TabularConverter, tabular_data_no assert (pgm_node_data["u_rated"] == [10.5e3, 400]).all() +def test_convert_table_to_component__filters( + converter: 
TabularConverter, tabular_data_no_units_no_substitutions: TabularData +): + converter._convert_col_def_to_attribute = MagicMock() + converter._parse_table_filters = MagicMock() + node_attributes_with_filter = {"id": "id_number", "u_rated": "u_nom", "filters": [{"test_fn": {}}]} + converter._convert_table_to_component( + data=tabular_data_no_units_no_substitutions, + data_type="input", + table="nodes", + component="node", + attributes=node_attributes_with_filter, + extra_info=None, + ) + converter._parse_table_filters.assert_called_once_with( + data=tabular_data_no_units_no_substitutions, + table="nodes", + filtering_functions=node_attributes_with_filter["filters"], + ) + + def test_convert_col_def_to_attribute( converter: TabularConverter, tabular_data_no_units_no_substitutions: TabularData, pgm_node_empty: SingleDataset, - pgm_line_empty: SingleDataset, - pgm_power_sensor_empty: SingleDataset, ): with pytest.raises( KeyError, match=r"Could not find attribute 'incorrect_attribute' for 'nodes'. 
" r"\(choose from: id, u_rated\)" @@ -171,6 +190,7 @@ def test_convert_col_def_to_attribute( component="node", attr="incorrect_attribute", col_def="id_number", + table_mask=None, extra_info=None, ) @@ -182,6 +202,7 @@ def test_convert_col_def_to_attribute( component="node", attr="extra", col_def="u_nom", + table_mask=None, extra_info={0: {}, 1: {}}, ) @@ -193,6 +214,7 @@ def test_convert_col_def_to_attribute( component="node", attr="u_rated", col_def="u_nom", + table_mask=None, extra_info=None, ) assert len(pgm_node_empty) == 1 @@ -210,6 +232,7 @@ def test_convert_col_def_to_attribute( component="node", attr="u_rated", col_def=["id_number", "u_nom"], + table_mask=None, extra_info=None, ) @@ -218,12 +241,22 @@ def test_handle_extra_info(converter: TabularConverter, tabular_data_no_units_no uuids = np.array([0, 1]) # possible to call function with extra_info = None converter._handle_extra_info( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def="u_nom", uuids=uuids, extra_info=None + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def="u_nom", + uuids=uuids, + table_mask=None, + extra_info=None, ) # _handle_extra_info creates extra info entry for id's that don't exist and updates existing entries extra_info: ExtraInfo = {0: {"some_value": "some_key"}} converter._handle_extra_info( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def="u_nom", uuids=uuids, extra_info=extra_info + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def="u_nom", + uuids=uuids, + table_mask=None, + extra_info=extra_info, ) assert extra_info == { 0: {"some_value": "some_key", "u_nom": 10500.0}, @@ -238,7 +271,14 @@ def test_handle_extra_info__units(converter: TabularConverter, tabular_data: Tab tabular_data._units = UnitMapping({"V": {"kV": 1000.0}}) # Act - converter._handle_extra_info(data=tabular_data, table="nodes", col_def="u_nom", uuids=uuids, extra_info=extra_info) + converter._handle_extra_info( + 
data=tabular_data, + table="nodes", + col_def="u_nom", + uuids=uuids, + table_mask=None, + extra_info=extra_info, + ) # Assert assert extra_info == {0: {"u_nom": 10500.0}, 1: {"u_nom": 400.0}} @@ -274,7 +314,11 @@ def test_serialize_data(converter: TabularConverter, pgm_node_empty: SingleDatas def test_parse_col_def(converter: TabularConverter, tabular_data_no_units_no_substitutions: TabularData): with pytest.raises(TypeError, match=r"Invalid column definition: \(\)"): converter._parse_col_def( - data=tabular_data_no_units_no_substitutions, table="table_name", col_def=(), extra_info=None + data=tabular_data_no_units_no_substitutions, + table="table_name", + col_def=(), + table_mask=None, + extra_info=None, ) # type(col_def) == int @@ -282,10 +326,14 @@ def test_parse_col_def(converter: TabularConverter, tabular_data_no_units_no_sub "power_grid_model_io.converters.tabular_converter.TabularConverter._parse_col_def_const" ) as mock_parse_col_def_const: converter._parse_col_def( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def=50, extra_info=None + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def=50, + table_mask=None, + extra_info=None, ) mock_parse_col_def_const.assert_called_once_with( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def=50 + data=tabular_data_no_units_no_substitutions, table="nodes", table_mask=None, col_def=50 ) # type(col_def) == float @@ -293,10 +341,14 @@ def test_parse_col_def(converter: TabularConverter, tabular_data_no_units_no_sub "power_grid_model_io.converters.tabular_converter.TabularConverter._parse_col_def_const" ) as mock_parse_col_def_const: converter._parse_col_def( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def=4.0, extra_info=None + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def=4.0, + table_mask=None, + extra_info=None, ) mock_parse_col_def_const.assert_called_once_with( - data=tabular_data_no_units_no_substitutions, 
table="nodes", col_def=4.0 + data=tabular_data_no_units_no_substitutions, table="nodes", col_def=4.0, table_mask=None ) # type(col_def) == str (regular expression) @@ -314,6 +366,7 @@ def test_parse_col_def(converter: TabularConverter, tabular_data_no_units_no_sub "value_column": "u_nom", } }, + table_mask=None, extra_info=None, ) mock_parse_reference.assert_called_once_with( @@ -323,6 +376,7 @@ def test_parse_col_def(converter: TabularConverter, tabular_data_no_units_no_sub query_column="from_node_side", key_column="id_number", value_column="u_nom", + table_mask=None, ) # type(col_def) == str @@ -330,10 +384,14 @@ def test_parse_col_def(converter: TabularConverter, tabular_data_no_units_no_sub "power_grid_model_io.converters.tabular_converter.TabularConverter._parse_col_def_column_name" ) as mock_parse_col_def_column_name: converter._parse_col_def( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def="col_name", extra_info=None + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def="col_name", + table_mask=None, + extra_info=None, ) mock_parse_col_def_column_name.assert_called_once_with( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def="col_name" + data=tabular_data_no_units_no_substitutions, table="nodes", col_def="col_name", table_mask=None ) # type(col_def) == dict @@ -341,10 +399,18 @@ def test_parse_col_def(converter: TabularConverter, tabular_data_no_units_no_sub "power_grid_model_io.converters.tabular_converter.TabularConverter._parse_col_def_filter" ) as mock_parse_col_def_filter: converter._parse_col_def( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def={}, extra_info=None + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def={}, + table_mask=None, + extra_info=None, ) mock_parse_col_def_filter.assert_called_once_with( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def={}, extra_info=None + data=tabular_data_no_units_no_substitutions, + 
table="nodes", + col_def={}, + table_mask=None, + extra_info=None, ) # type(col_def) == list @@ -352,47 +418,70 @@ def test_parse_col_def(converter: TabularConverter, tabular_data_no_units_no_sub "power_grid_model_io.converters.tabular_converter.TabularConverter._parse_col_def_composite" ) as mock_parse_col_def_composite: converter._parse_col_def( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def=[], extra_info=None + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def=[], + table_mask=None, + extra_info=None, ) mock_parse_col_def_composite.assert_called_once_with( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def=[] + data=tabular_data_no_units_no_substitutions, table="nodes", col_def=[], table_mask=None ) def test_parse_col_def_const(converter: TabularConverter, tabular_data_no_units_no_substitutions: TabularData): with pytest.raises(AssertionError): converter._parse_col_def_const( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def="str" # type: ignore + data=tabular_data_no_units_no_substitutions, table="nodes", col_def="str", table_mask=None # type: ignore ) # type(col_def) == int - col_int = converter._parse_col_def_const(data=tabular_data_no_units_no_substitutions, table="nodes", col_def=50) + col_int = converter._parse_col_def_const( + data=tabular_data_no_units_no_substitutions, table="nodes", col_def=50, table_mask=None + ) assert_frame_equal(col_int, pd.DataFrame([50, 50])) # type(col_def) == float - col_int = converter._parse_col_def_const(data=tabular_data_no_units_no_substitutions, table="nodes", col_def=3.0) + col_int = converter._parse_col_def_const( + data=tabular_data_no_units_no_substitutions, table="nodes", col_def=3.0, table_mask=None + ) + assert_frame_equal(col_int, pd.DataFrame([3.0, 3.0])) + + +def test_parse_col_def_const__no_filter( + converter: TabularConverter, tabular_data_no_units_no_substitutions: TabularData +): + col_int = converter._parse_col_def_const( 
+ data=tabular_data_no_units_no_substitutions, table="nodes", col_def=3.0, table_mask=None + ) assert_frame_equal(col_int, pd.DataFrame([3.0, 3.0])) def test_parse_col_def_column_name(converter: TabularConverter, tabular_data_no_units_no_substitutions: TabularData): with pytest.raises(AssertionError): converter._parse_col_def_column_name( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def=1 # type: ignore + data=tabular_data_no_units_no_substitutions, table="nodes", col_def=1, table_mask=None # type: ignore ) df_multiple_columns = converter._parse_col_def_column_name( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def=" wrong_column | id_number | u_nom " + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def=" wrong_column | id_number | u_nom ", + table_mask=None, ) assert_frame_equal(df_multiple_columns, pd.DataFrame([1, 2], columns=["id_number"])) df_inf = converter._parse_col_def_column_name( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def="inf" + data=tabular_data_no_units_no_substitutions, table="nodes", col_def="inf", table_mask=None ) assert_frame_equal(df_inf, pd.DataFrame([np.inf, np.inf])) with pytest.raises(KeyError, match="Could not find column 'a' and 'b' and 'c' on table 'nodes'"): converter._parse_col_def_column_name( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def=" a | b | c " + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def=" a | b | c ", + table_mask=None, ) @@ -405,6 +494,7 @@ def test_parse_reference(converter: TabularConverter, tabular_data_no_units_no_s query_column="from_node_side", key_column="id_number", value_column="u_nom", + table_mask=None, ) assert_frame_equal(df_lines_from_node_long, pd.DataFrame([400.0, 10.5e3], columns=["u_nom"])) @@ -413,11 +503,13 @@ def test_parse_col_def_filter(converter: TabularConverter): # Act/Assert: with pytest.raises(AssertionError): converter._parse_col_def_filter( - 
data=tabular_data_no_units_no_substitutions, table="", col_def=[], extra_info=None # type: ignore + data=tabular_data_no_units_no_substitutions, table="", col_def=[], table_mask=None, extra_info=None # type: ignore ) with pytest.raises(TypeError, match="Invalid foo definition: 123"): - converter._parse_col_def_filter(data=MagicMock(), table="", col_def={"foo": 123}, extra_info=None) + converter._parse_col_def_filter( + data=MagicMock(), table="", col_def={"foo": 123}, table_mask=None, extra_info=None + ) @patch("power_grid_model_io.converters.tabular_converter.TabularConverter._parse_function") @@ -432,6 +524,7 @@ def test_parse_col_def_filter__function(mock_parse_function: MagicMock, converte data=data, table="nodes", col_def={"path.to.function": {"foo": "id_number", "bar": "u_nom"}}, + table_mask=None, extra_info=None, ) @@ -441,6 +534,7 @@ def test_parse_col_def_filter__function(mock_parse_function: MagicMock, converte table="nodes", function="path.to.function", col_def={"foo": "id_number", "bar": "u_nom"}, + table_mask=None, ) pd.testing.assert_frame_equal(result, function_result) @@ -454,12 +548,16 @@ def test_parse_col_def_filter__pandas_function(mock_parse_function: MagicMock, c # Act result = converter._parse_col_def_filter( - data=data, table="nodes", col_def={"multiply": ["id_number", "u_nom"]}, extra_info=None + data=data, table="nodes", col_def={"multiply": ["id_number", "u_nom"]}, table_mask=None, extra_info=None ) # Assert mock_parse_function.assert_called_once_with( - data=data, table="nodes", fn_name="multiply", col_def=["id_number", "u_nom"] + data=data, + table="nodes", + fn_name="multiply", + col_def=["id_number", "u_nom"], + table_mask=None, ) pd.testing.assert_frame_equal(result, function_result) @@ -477,6 +575,7 @@ def test_parse_col_def_filter__auto_id(mock_parse_auto_id: MagicMock, converter: data=data, table="lines", col_def={"auto_id": {"table": "nodes", "name": "dummy", "key": "from_node_side"}}, + table_mask=None, extra_info=extra_info, 
) @@ -487,13 +586,16 @@ def test_parse_col_def_filter__auto_id(mock_parse_auto_id: MagicMock, converter: ref_table="nodes", ref_name="dummy", key_col_def="from_node_side", + table_mask=None, extra_info=extra_info, ) pd.testing.assert_frame_equal(result, auto_id_result) # Act/Assert: with pytest.raises(ValueError, match="Invalid auto_id definition: {'a': 1, 'b': 2}"): - converter._parse_col_def_filter(data=data, table="", col_def={"auto_id": {"a": 1, "b": 2}}, extra_info=None) + converter._parse_col_def_filter( + data=data, table="", col_def={"auto_id": {"a": 1, "b": 2}}, table_mask=None, extra_info=None + ) @patch("power_grid_model_io.converters.tabular_converter.TabularConverter._parse_reference") @@ -515,6 +617,7 @@ def test_parse_col_def_filter__reference(mock_parse_reference: MagicMock, conver "value_column": "u_nom", } }, + table_mask=None, extra_info=None, ) @@ -526,12 +629,15 @@ def test_parse_col_def_filter__reference(mock_parse_reference: MagicMock, conver query_column="from_node_side", key_column="id_number", value_column="u_nom", + table_mask=None, ) assert result is reference_result # Act/Assert: with pytest.raises(ValueError, match="Invalid reference definition: {'a': 1, 'b': 2}"): - converter._parse_col_def_filter(data=data, table="", col_def={"reference": {"a": 1, "b": 2}}, extra_info=None) + converter._parse_col_def_filter( + data=data, table="", col_def={"reference": {"a": 1, "b": 2}}, table_mask=None, extra_info=None + ) @patch("power_grid_model_io.converters.tabular_converter.TabularConverter._get_id") @@ -546,6 +652,7 @@ def test_parse_auto_id( ref_table=None, ref_name=None, key_col_def="id_number", + table_mask=None, extra_info=None, ) mock_get_id.assert_has_calls( @@ -566,6 +673,7 @@ def test_parse_auto_id__extra_info( ref_table=None, ref_name=None, key_col_def="id_number", + table_mask=None, extra_info=extra_info, ) mock_get_id.assert_has_calls( @@ -588,6 +696,7 @@ def test_parse_auto_id__reference_column( ref_table="nodes", ref_name=None, 
key_col_def={"id_number": "from_node_side"}, + table_mask=None, extra_info=extra_info, ) mock_get_id.assert_has_calls( @@ -609,6 +718,7 @@ def test_parse_auto_id__composite_key( ref_table=None, ref_name=None, key_col_def=["id_number", "u_nom"], + table_mask=None, extra_info=extra_info, ) mock_get_id.assert_has_calls( @@ -634,6 +744,7 @@ def test_parse_auto_id__named_objects( ref_table=None, ref_name="internal_node", key_col_def="id_number", + table_mask=None, extra_info=extra_info, ) mock_get_id.assert_has_calls( @@ -659,6 +770,7 @@ def test_parse_auto_id__named_keys( ref_table=None, ref_name=None, key_col_def={"id": "id_number", "node": "from_node_side"}, + table_mask=None, extra_info=extra_info, ) mock_get_id.assert_has_calls( @@ -681,6 +793,7 @@ def test_parse_auto_id__invalid_key_definition( ref_table=None, ref_name=None, key_col_def=123, # type: ignore + table_mask=None, extra_info=None, ) @@ -698,7 +811,10 @@ def test_parse_auto_id__invalid_key_definition( ) @patch("power_grid_model_io.converters.tabular_converter.TabularConverter._parse_col_def") def test_parse_pandas_function( - mock_parse_col_def: MagicMock, converter: TabularConverter, function: str, expected: Tuple[int, int, int] + mock_parse_col_def: MagicMock, + converter: TabularConverter, + function: str, + expected: Tuple[int, int, int], ): # Arrange data = MagicMock() @@ -707,10 +823,14 @@ def test_parse_pandas_function( mock_parse_col_def.return_value = parse_col_def_data # Act - result = converter._parse_pandas_function(data=data, table="foo", fn_name=function, col_def=col_def) + result = converter._parse_pandas_function( + data=data, table="foo", fn_name=function, col_def=col_def, table_mask=None + ) # Assert - mock_parse_col_def.assert_called_once_with(data=data, table="foo", col_def=col_def, extra_info=None) + mock_parse_col_def.assert_called_once_with( + data=data, table="foo", col_def=col_def, table_mask=None, extra_info=None + ) pd.testing.assert_frame_equal(result, pd.DataFrame(expected)) 
@@ -723,10 +843,14 @@ def test_parse_pandas_function__no_data(mock_parse_col_def: MagicMock, converter mock_parse_col_def.return_value = parse_col_def_data # Act - result = converter._parse_pandas_function(data=data, table="foo", fn_name="multiply", col_def=col_def) + result = converter._parse_pandas_function( + data=data, table="foo", fn_name="multiply", col_def=col_def, table_mask=None + ) # Assert - mock_parse_col_def.assert_called_once_with(data=data, table="foo", col_def=col_def, extra_info=None) + mock_parse_col_def.assert_called_once_with( + data=data, table="foo", col_def=col_def, table_mask=None, extra_info=None + ) assert result.empty @@ -737,15 +861,15 @@ def test_parse_pandas_function__invalid(mock_parse_col_def: MagicMock, converter # Act / Assert with pytest.raises(AssertionError): - converter._parse_pandas_function(data=MagicMock(), table="foo", fn_name="multiply", col_def=123) # type: ignore + converter._parse_pandas_function(data=MagicMock(), table="foo", fn_name="multiply", col_def=123, table_mask=None) # type: ignore # Act / Assert with pytest.raises(ValueError, match="Pandas DataFrame has no function 'bar'"): - converter._parse_pandas_function(data=MagicMock(), table="foo", fn_name="bar", col_def=[]) + converter._parse_pandas_function(data=MagicMock(), table="foo", fn_name="bar", col_def=[], table_mask=None) # Act / Assert with pytest.raises(ValueError, match="Invalid pandas function DataFrame.apply"): - converter._parse_pandas_function(data=MagicMock(), table="foo", fn_name="apply", col_def=[]) + converter._parse_pandas_function(data=MagicMock(), table="foo", fn_name="apply", col_def=[], table_mask=None) @patch("power_grid_model_io.converters.tabular_converter.get_function") @@ -767,6 +891,7 @@ def multiply_by_two(value: int): table="nodes", function="multiply_by_two", col_def={"value": "u_nom"}, + table_mask=None, ) assert_frame_equal(multiplied_data, pd.DataFrame([4, 8, 10])) @@ -790,17 +915,21 @@ def multiply_by_two(value: int): 
table="nodes", function="multiply_by_two", col_def={"value": "u_nom"}, + table_mask=None, ) def test_parse_col_def_composite(converter: TabularConverter, tabular_data_no_units_no_substitutions: TabularData): with pytest.raises(AssertionError): converter._parse_col_def_composite( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def="wrong" # type: ignore + data=tabular_data_no_units_no_substitutions, table="nodes", col_def="wrong", table_mask=None # type: ignore ) df = converter._parse_col_def_composite( - data=tabular_data_no_units_no_substitutions, table="nodes", col_def=["id_number", "u_nom"] + data=tabular_data_no_units_no_substitutions, + table="nodes", + col_def=["id_number", "u_nom"], + table_mask=None, ) assert_frame_equal(df, tabular_data_no_units_no_substitutions["nodes"]) @@ -1017,3 +1146,38 @@ def test_lookup_ids__duplicate_keys(converter: TabularConverter): # Assert pd.testing.assert_frame_equal(reference, pd.DataFrame([[123, 456]], columns=["table", "name"], index=[0])) + + +@pytest.mark.parametrize( + ("bool_fn", "expected"), + [((True), np.array([True, True])), ((False), np.array([False, False]))], +) +@patch("power_grid_model_io.converters.tabular_converter.get_function") +def test_parse_table_filters( + mock_get_function: MagicMock, + converter: TabularConverter, + tabular_data: TabularData, + bool_fn: Callable, + expected: np.ndarray, +): + filtering_functions = [{"test_fn": {"kwarg_1": "a"}}] + + def bool_fn_filter(row: pd.Series, **kwargs): + assert kwargs == {"kwarg_1": "a"} + return bool_fn + + mock_get_function.return_value = bool_fn_filter + + actual = converter._parse_table_filters(data=tabular_data, table="nodes", filtering_functions=filtering_functions) + + mock_get_function.assert_called_once_with("test_fn") + # check if return value is a 1d bool np array + assert isinstance(actual, np.ndarray) + assert actual.ndim == 1 + assert actual.dtype is np.dtype("bool") + assert all(actual == expected) + + +def 
test_parse_table_filters__ndarray_data(converter: TabularConverter):
+    numpy_tabular_data = TabularData(numpy_table=np.ones((4, 3)))
+    assert converter._parse_table_filters(data=numpy_tabular_data, table="numpy_table", filtering_functions=[]) is None
diff --git a/tests/unit/functions/test_filters.py b/tests/unit/functions/test_filters.py
new file mode 100644
index 00000000..4b760554
--- /dev/null
+++ b/tests/unit/functions/test_filters.py
@@ -0,0 +1,52 @@
+# SPDX-FileCopyrightText: Contributors to the Power Grid Model project
+#
+# SPDX-License-Identifier: MPL-2.0
+from typing import Tuple
+from unittest.mock import MagicMock, patch
+
+import pandas as pd
+from pytest import mark
+
+from power_grid_model_io.functions.filters import exclude_all_columns_empty_or_zero, exclude_empty, exclude_value
+
+
+@patch("power_grid_model_io.functions.filters.has_value")
+def test_exclude_empty(mock_has_value: MagicMock):
+    col = "foo"
+    row = pd.Series({"foo": 1, "bar": "xyz"})
+    actual = exclude_empty(row, col)
+    mock_has_value.assert_called_once_with(row[col])
+    assert actual == mock_has_value.return_value
+
+
+@mark.parametrize(
+    ("row_value", "check_value", "expected"),
+    [
+        (4.0, "x", True),
+        (3.0, 3.0, False),
+        (3.2, 3.1, True),
+    ],
+)
+def test_exclude_value(row_value: float, check_value: float, expected: bool):
+    row = pd.Series({"foo": row_value})
+    actual = exclude_value(row=row, col="foo", value=check_value)
+    assert actual == expected
+
+
+@mark.parametrize(
+    ("row_value", "expected"),
+    [
+        ((1.0, 2.0), True),
+        ((1.0, 0.0), True),
+        ((0.0, 1.0), True),
+        ((0.0, 0.0), False),
+        ((1.0, 3.0), True),
+        (("", 1.0), True),
+        (("", 0.0), False),
+        (("", ""), False),
+    ],
+)
+def test_exclude_all_columns_empty_or_zero(row_value: Tuple[float, float], expected: bool):
+    row = pd.Series({"foo": row_value[0], "bar": row_value[1]})
+    actual = exclude_all_columns_empty_or_zero(row=row, cols=["foo", "bar"])
+    assert actual == expected
diff --git 
a/tests/validation/converters/test_vision_excel_converter.py b/tests/validation/converters/test_vision_excel_converter.py index 1c438cef..8240abad 100644 --- a/tests/validation/converters/test_vision_excel_converter.py +++ b/tests/validation/converters/test_vision_excel_converter.py @@ -6,8 +6,9 @@ import logging from functools import lru_cache from pathlib import Path -from typing import List, Tuple +from typing import List, Optional, Tuple +import numpy as np import pandas as pd import pytest from power_grid_model.data_types import SingleDataset @@ -242,27 +243,31 @@ def test_get_get_appliance_id(language: str, table: str, columns: List[str]): @pytest.mark.parametrize( - ("language", "table", "name", "columns"), + ("language", "table", "name", "columns", "filtering_mask"), [ - ("en", "Transformer loads", "transformer", ["Node.Number", "Subnumber"]), - ("en", "Transformer loads", "internal_node", ["Node.Number", "Subnumber"]), - ("en", "Transformer loads", "load", ["Node.Number", "Subnumber"]), - ("en", "Transformer loads", "generation", ["Node.Number", "Subnumber"]), - ("en", "Transformer loads", "pv_generation", ["Node.Number", "Subnumber"]), - ("nl", "Transformatorbelastingen", "transformer", ["Knooppunt.Nummer", "Subnummer"]), - ("nl", "Transformatorbelastingen", "internal_node", ["Knooppunt.Nummer", "Subnummer"]), - ("nl", "Transformatorbelastingen", "load", ["Knooppunt.Nummer", "Subnummer"]), - ("nl", "Transformatorbelastingen", "generation", ["Knooppunt.Nummer", "Subnummer"]), - ("nl", "Transformatorbelastingen", "pv_generation", ["Knooppunt.Nummer", "Subnummer"]), + ("en", "Transformer loads", "transformer", ["Node.Number", "Subnumber"], None), + ("en", "Transformer loads", "internal_node", ["Node.Number", "Subnumber"], None), + ("en", "Transformer loads", "load", ["Node.Number", "Subnumber"], None), + ("en", "Transformer loads", "generation", ["Node.Number", "Subnumber"], np.array([False, True])), + ("en", "Transformer loads", "pv_generation", 
["Node.Number", "Subnumber"], np.array([False, True])), + ("nl", "Transformatorbelastingen", "transformer", ["Knooppunt.Nummer", "Subnummer"], None), + ("nl", "Transformatorbelastingen", "internal_node", ["Knooppunt.Nummer", "Subnummer"], None), + ("nl", "Transformatorbelastingen", "load", ["Knooppunt.Nummer", "Subnummer"], None), + ("nl", "Transformatorbelastingen", "generation", ["Knooppunt.Nummer", "Subnummer"], np.array([False, True])), + ("nl", "Transformatorbelastingen", "pv_generation", ["Knooppunt.Nummer", "Subnummer"], np.array([False, True])), ], ) -def test_get_get_virtual_id(language: str, table: str, name: str, columns: List[str]): +def test_get_get_virtual_id( + language: str, table: str, name: str, columns: List[str], filtering_mask: Optional[np.ndarray] +): # Arrange converter = vision_excel_converter(language=language) _, extra_info = load_and_convert_excel_file(language=language) assert converter._source is not None source_data = converter._source.load()[table][columns] + if filtering_mask is not None: + source_data = source_data[filtering_mask] # Act/Assert assert isinstance(source_data, pd.DataFrame) @@ -332,11 +337,11 @@ def test_guid_extra_info(): vision_cvtr = prep_vision_97(language=LANGUAGE_EN) _, extra_info = vision_cvtr.load_input_data() - assert extra_info[0]["GUID"] == "{8110C18E-7FDF-4A43-B868-E05E1DD7909F}" - assert extra_info[1]["GUID"] == "{D507B771-AC81-41F5-A292-91CF6119BE70}" - assert extra_info[2]["GUID"] == "{C92556BA-57E6-4780-A69E-DDDCA759BFBE}" - assert extra_info[3]["GUID"] == "{23C17CB9-7C1F-4AD0-B3C9-9B4AB99DB400}" - assert extra_info[4]["GUID"] == "{915748FF-2EE0-4C5E-806A-DBBFC433BEA4}" + assert extra_info[0]["GUID"] == "{7FF722ED-33B3-4761-84AC-A164310D3C86}" + assert extra_info[1]["GUID"] == "{1ED177A7-1F5D-4D81-8DE7-AB3E58512E0B}" + assert extra_info[2]["GUID"] == "{DDE3457B-DB9A-4DA9-9564-6F49E0F296BD}" + assert extra_info[3]["GUID"] == "{A79AFDE9-4096-4BEB-AB63-2B851D7FC6D1}" + assert extra_info[4]["GUID"] == 
"{7848DBC8-9685-452C-89AF-9AB308224689}" for i in range(5, len(extra_info)): assert "GUID" not in extra_info[i]