BugFix / Capital enums made lower case #792

Merged · 2 commits · Oct 18, 2024
11 changes: 2 additions & 9 deletions src/power_grid_model/core/power_grid_dataset.py
@@ -466,22 +466,15 @@ def _filter_attributes(self, attributes):
         for key in keys_to_remove:
             del attributes[key]

-    def _filter_with_option(self):
-        if self._data_filter is ComponentAttributeFilterOptions.RELEVANT:
-            for attributes in self._data.values():
-                self._filter_attributes(attributes)
-
     def _filter_with_mapping(self):
         for component_type, attributes in self._data.items():
             if component_type in self._data_filter:
                 filter_option = self._data_filter[component_type]
-                if filter_option is ComponentAttributeFilterOptions.RELEVANT:
+                if filter_option is ComponentAttributeFilterOptions.relevant:
                     self._filter_attributes(attributes)

     def _post_filtering(self):
-        if isinstance(self._data_filter, ComponentAttributeFilterOptions):
-            self._filter_with_option()
-        elif isinstance(self._data_filter, dict):
+        if isinstance(self._data_filter, dict):
             self._filter_with_mapping()


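With `_filter_with_option` removed, only mapping-style filters are handled in `_post_filtering` after this change. For reference, a minimal sketch of the mapping shape that `_filter_with_mapping` consumes, using the new lower-case members; the component keys and attribute lists mirror the test parametrizations further down, and the public call site that accepts such a mapping is not shown in this diff:

```python
from power_grid_model.enum import ComponentAttributeFilterOptions

# Per-component filter mapping: each value may be an attribute list, None,
# or a ComponentAttributeFilterOptions member (now spelled in lower case).
data_filter = {
    "node": ["id"],                                         # keep only the listed attributes
    "line": ComponentAttributeFilterOptions.everything,     # keep every attribute
    "asym_load": ComponentAttributeFilterOptions.relevant,  # keep only non-NaN attributes
    "sym_load": None,                                       # keep as row-based data
}
```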
4 changes: 2 additions & 2 deletions src/power_grid_model/enum.py
@@ -208,7 +208,7 @@ class _ExperimentalFeatures(IntEnum):
 class ComponentAttributeFilterOptions(IntEnum):
     """Filter option component or attribute"""

-    ALL = 0
+    everything = 0
     """Filter all components/attributes"""
-    RELEVANT = 1
+    relevant = 1
     """Filter only non-empty components/attributes that contain non-NaN values"""
17 changes: 10 additions & 7 deletions tests/unit/test_internal_utils.py
@@ -612,15 +612,18 @@ def test_convert_batch_dataset_to_batch_list_invalid_type_sparse(_mock: MagicMoc
         convert_batch_dataset_to_batch_list(update_data)


-DATA_FILTER_ALL = ComponentAttributeFilterOptions.ALL
-DATA_FILTER_RELEVANT = ComponentAttributeFilterOptions.RELEVANT
+DATA_FILTER_EVERYTHING = ComponentAttributeFilterOptions.everything
+DATA_FILTER_RELEVANT = ComponentAttributeFilterOptions.relevant


 @pytest.mark.parametrize(
     ("data_filter", "expected"),
     [
         (None, {CT.node: None, CT.sym_load: None, CT.source: None}),
-        (DATA_FILTER_ALL, {CT.node: DATA_FILTER_ALL, CT.sym_load: DATA_FILTER_ALL, CT.source: DATA_FILTER_ALL}),
+        (
+            DATA_FILTER_EVERYTHING,
+            {CT.node: DATA_FILTER_EVERYTHING, CT.sym_load: DATA_FILTER_EVERYTHING, CT.source: DATA_FILTER_EVERYTHING},
+        ),
         (
             DATA_FILTER_RELEVANT,
             {CT.node: DATA_FILTER_RELEVANT, CT.sym_load: DATA_FILTER_RELEVANT, CT.source: DATA_FILTER_RELEVANT},
@@ -630,11 +630,11 @@ def test_convert_batch_dataset_to_batch_list_invalid_type_sparse(_mock: MagicMoc
         ({CT.node: [], CT.sym_load: []}, {CT.node: [], CT.sym_load: []}),
         ({CT.node: [], CT.sym_load: ["p"]}, {CT.node: [], CT.sym_load: ["p"]}),
         ({CT.node: None, CT.sym_load: ["p"]}, {CT.node: None, CT.sym_load: ["p"]}),
-        ({CT.node: DATA_FILTER_ALL, CT.sym_load: ["p"]}, {CT.node: DATA_FILTER_ALL, CT.sym_load: ["p"]}),
+        ({CT.node: DATA_FILTER_EVERYTHING, CT.sym_load: ["p"]}, {CT.node: DATA_FILTER_EVERYTHING, CT.sym_load: ["p"]}),
         ({CT.node: DATA_FILTER_RELEVANT, CT.sym_load: ["p"]}, {CT.node: DATA_FILTER_RELEVANT, CT.sym_load: ["p"]}),
         (
-            {CT.node: DATA_FILTER_ALL, CT.sym_load: DATA_FILTER_ALL},
-            {CT.node: DATA_FILTER_ALL, CT.sym_load: DATA_FILTER_ALL},
+            {CT.node: DATA_FILTER_EVERYTHING, CT.sym_load: DATA_FILTER_EVERYTHING},
+            {CT.node: DATA_FILTER_EVERYTHING, CT.sym_load: DATA_FILTER_EVERYTHING},
         ),
         (
             {CT.node: DATA_FILTER_RELEVANT, CT.sym_load: DATA_FILTER_RELEVANT},
@@ -875,7 +878,7 @@ def compare_row_data(actual_row_data, desired_row_data):
 def test_dense_row_data_to_from_col_data(row_data):
     # row data to columnar data and back
     col_data = compatibility_convert_row_columnar_dataset(
-        row_data, ComponentAttributeFilterOptions.ALL, DatasetType.update
+        row_data, ComponentAttributeFilterOptions.everything, DatasetType.update
     )
     new_row_data = compatibility_convert_row_columnar_dataset(col_data, None, DatasetType.update)
     compare_row_data(row_data, new_row_data)
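For context, a hypothetical round trip outside the test suite, mirroring the call pattern above. The import locations for `compatibility_convert_row_columnar_dataset` and `DatasetType` are assumptions (this diff only shows call sites), and the tiny update dataset is made up for illustration:

```python
from power_grid_model import initialize_array
from power_grid_model.enum import ComponentAttributeFilterOptions

# Assumed import locations; only the call sites appear in this diff.
from power_grid_model.core.dataset_definitions import DatasetType
from power_grid_model._utils import compatibility_convert_row_columnar_dataset

# Build a minimal row-based update dataset (hypothetical values).
sym_load = initialize_array("update", "sym_load", 2)
sym_load["id"] = [4, 5]
sym_load["p_specified"] = [1.0e6, 2.0e6]
row_data = {"sym_load": sym_load}

# Row -> columnar with the renamed lower-case option, then back to row data.
col_data = compatibility_convert_row_columnar_dataset(
    row_data, ComponentAttributeFilterOptions.everything, DatasetType.update
)
row_data_again = compatibility_convert_row_columnar_dataset(col_data, None, DatasetType.update)
```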
6 changes: 3 additions & 3 deletions tests/unit/test_power_grid_model.py
@@ -72,7 +72,7 @@ def input_row():
 @pytest.fixture
 def input_col(input_row):
     return compatibility_convert_row_columnar_dataset(
-        input_row, ComponentAttributeFilterOptions.RELEVANT, DatasetType.input
+        input_row, ComponentAttributeFilterOptions.relevant, DatasetType.input
     )


@@ -117,7 +117,7 @@ def update_batch_row():
 @pytest.fixture
 def update_batch_col(update_batch_row):
     return compatibility_convert_row_columnar_dataset(
-        update_batch_row, ComponentAttributeFilterOptions.RELEVANT, DatasetType.update
+        update_batch_row, ComponentAttributeFilterOptions.relevant, DatasetType.update
     )


@@ -163,7 +163,7 @@ def test_update_error(model: PowerGridModel):
     with pytest.raises(PowerGridError, match="The id cannot be found:"):
        model.update(update_data=update_data)
     update_data_col = compatibility_convert_row_columnar_dataset(
-        update_data, ComponentAttributeFilterOptions.RELEVANT, DatasetType.update
+        update_data, ComponentAttributeFilterOptions.relevant, DatasetType.update
     )
     with pytest.raises(PowerGridError, match="The id cannot be found:"):
         model.update(update_data=update_data_col)
12 changes: 6 additions & 6 deletions tests/unit/test_serialization.py
@@ -377,17 +377,17 @@ def serialized_data(request):
 @pytest.fixture(
     params=[
         pytest.param(None, id="All row filter"),
-        pytest.param(ComponentAttributeFilterOptions.ALL, id="All columnar filter"),
-        pytest.param(ComponentAttributeFilterOptions.RELEVANT, id="All relevant columnar filter"),
+        pytest.param(ComponentAttributeFilterOptions.everything, id="All columnar filter"),
+        pytest.param(ComponentAttributeFilterOptions.relevant, id="All relevant columnar filter"),
         pytest.param({"node": ["id"], "sym_load": ["id"]}, id="columnar filter"),
         pytest.param({"node": ["id"], "sym_load": None}, id="mixed columnar/row filter"),
         pytest.param({"node": ["id"], "shunt": None}, id="unused component filter"),
         pytest.param(
             {
                 "node": ["id"],
-                "line": ComponentAttributeFilterOptions.ALL,
+                "line": ComponentAttributeFilterOptions.everything,
                 "sym_load": None,
-                "asym_load": ComponentAttributeFilterOptions.RELEVANT,
+                "asym_load": ComponentAttributeFilterOptions.relevant,
             },
             id="mixed filter",
         ),
@@ -629,9 +629,9 @@ def _check_only_relevant_attributes_present(component_values) -> bool:


 def assert_deserialization_filtering_correct(deserialized_dataset: Dataset, data_filter) -> bool:
-    if data_filter is ComponentAttributeFilterOptions.ALL:
+    if data_filter is ComponentAttributeFilterOptions.everything:
         return True
-    if data_filter is ComponentAttributeFilterOptions.RELEVANT:
+    if data_filter is ComponentAttributeFilterOptions.relevant:
         for component_values in deserialized_dataset.values():
             if not _check_only_relevant_attributes_present(component_values):
                 return False
8 changes: 4 additions & 4 deletions tests/unit/validation/test_batch_validation.py
@@ -45,14 +45,14 @@ def original_input_data() -> dict[str, np.ndarray]:
 @pytest.fixture
 def original_input_data_columnar_all(original_input_data):
     return compatibility_convert_row_columnar_dataset(
-        original_input_data, ComponentAttributeFilterOptions.ALL, DatasetType.input
+        original_input_data, ComponentAttributeFilterOptions.everything, DatasetType.input
     )


 @pytest.fixture
 def original_input_data_columnar_relevant(original_input_data):
     return compatibility_convert_row_columnar_dataset(
-        original_input_data, ComponentAttributeFilterOptions.RELEVANT, DatasetType.input
+        original_input_data, ComponentAttributeFilterOptions.relevant, DatasetType.input
     )


@@ -79,14 +79,14 @@ def original_batch_data() -> dict[str, np.ndarray]:
 @pytest.fixture
 def original_batch_data_columnar_all(original_batch_data):
     return compatibility_convert_row_columnar_dataset(
-        original_batch_data, ComponentAttributeFilterOptions.ALL, DatasetType.update
+        original_batch_data, ComponentAttributeFilterOptions.everything, DatasetType.update
     )


 @pytest.fixture
 def original_batch_data_columnar_relevant(original_batch_data):
     return compatibility_convert_row_columnar_dataset(
-        original_batch_data, ComponentAttributeFilterOptions.RELEVANT, DatasetType.update
+        original_batch_data, ComponentAttributeFilterOptions.relevant, DatasetType.update
     )


4 changes: 2 additions & 2 deletions tests/unit/validation/test_input_validation.py
@@ -278,14 +278,14 @@ def original_data() -> dict[ComponentType, np.ndarray]:
 @pytest.fixture
 def original_data_columnar_all(original_data):
     return compatibility_convert_row_columnar_dataset(
-        original_data, ComponentAttributeFilterOptions.ALL, DatasetType.input
+        original_data, ComponentAttributeFilterOptions.everything, DatasetType.input
     )


 @pytest.fixture
 def original_data_columnar_relevant(original_data):
     return compatibility_convert_row_columnar_dataset(
-        original_data, ComponentAttributeFilterOptions.RELEVANT, DatasetType.input
+        original_data, ComponentAttributeFilterOptions.relevant, DatasetType.input
     )

