addressed comments
Signed-off-by: Jerry Guo <Jerry.Jinfeng.Guo@alliander.com>
Jerry-Jinfeng-Guo committed Oct 30, 2024
1 parent 963cac2 commit 679e9b8
Showing 5 changed files with 29 additions and 70 deletions.
@@ -184,6 +184,7 @@ template <dataset_type_tag dataset_type_> class Dataset {
if (data == nullptr && std::ranges::all_of(attributes, [](auto const& x) { return x.data == nullptr; })) {
return invalid_index;
}
+ assert(data == nullptr); // assume columnar buffer

auto const found = std::ranges::find_if(
attributes, [attr_name](auto const& x) { return x.meta_attribute->name == attr_name; });
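The added assert documents the precondition of this attribute lookup: in a columnar buffer the component-level data pointer is null and each attribute carries its own buffer, whereas a row-based buffer sets the single data pointer. A minimal standalone sketch of that distinction, using simplified stand-in types rather than the actual Dataset API:

// Sketch only: simplified stand-ins, not the actual Dataset/attribute buffer types.
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <string_view>
#include <vector>

struct AttributeBufferSketch {
    std::string_view name;
    void const* data{nullptr};
};

struct ComponentBufferSketch {
    void const* data{nullptr};                      // set only for a row-based buffer
    std::vector<AttributeBufferSketch> attributes;  // per-attribute data only for a columnar buffer

    bool is_columnar() const { return data == nullptr; }
};

// Attribute lookup analogous to the hunk above: only meaningful for columnar buffers.
std::ptrdiff_t find_attribute(ComponentBufferSketch const& buffer, std::string_view attr_name) {
    constexpr std::ptrdiff_t invalid_index{-1};
    if (buffer.data == nullptr &&
        std::ranges::all_of(buffer.attributes, [](auto const& x) { return x.data == nullptr; })) {
        return invalid_index; // nothing was provided for this component at all
    }
    assert(buffer.is_columnar()); // assume columnar buffer, as the new assert does
    for (std::size_t i = 0; i != buffer.attributes.size(); ++i) {
        if (buffer.attributes[i].name == attr_name) {
            return static_cast<std::ptrdiff_t>(i);
        }
    }
    return invalid_index;
}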
@@ -39,16 +39,16 @@ inline void get_component_sequence(MainModelState<ComponentContainer> const& sta
auto idx_getter_default = [&state](UpdateType const& update) {
return get_component_idx_by_id<Component>(state, update.id);
};
- auto idx_getter_func = [&state, n_comp_elements](auto index) {
+ auto idx_getter_func = [&state](auto index) {
Idx const group = get_component_group_idx<Component>(state);
- assert(index < n_comp_elements);
return Idx2D{group, index};
};

std::ranges::transform(begin, end, destination, [&, index = 0](UpdateType const& update) mutable {
if (n_comp_elements == na_Idx) {
return idx_getter_default(update);
}
+ assert(index < n_comp_elements);
return idx_getter_func(index++); // NOSONAR
});
}
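The change hoists the bounds assert from the positional getter into the transform callback, so the lambda no longer captures n_comp_elements. A minimal sketch of the same pattern with simplified stand-in types (not the real get_component_sequence signature):

#include <algorithm>
#include <cassert>
#include <vector>

struct Idx2D {
    long group;
    long pos;
};
constexpr long na_Idx = -1;

// Maps each update entry either by its ID or by its position in the update buffer.
std::vector<Idx2D> map_updates(std::vector<long> const& update_ids, long group, long n_comp_elements) {
    auto by_id = [group](long id) { return Idx2D{group, id}; };             // stand-in for the ID-based lookup
    auto by_position = [group](long index) { return Idx2D{group, index}; };

    std::vector<Idx2D> result(update_ids.size());
    std::ranges::transform(update_ids, result.begin(), [&, index = 0L](long id) mutable {
        if (n_comp_elements == na_Idx) {
            return by_id(id);                // IDs are provided: look up by ID
        }
        assert(index < n_comp_elements);     // bounds check at the call site, not inside the getter
        return by_position(index++);         // IDs omitted: map by position
    });
    return result;
}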
@@ -159,6 +159,7 @@ class MainModelImpl<ExtraRetrievableTypes<ExtraRetrievableType...>, ComponentLis
bool no_id_row() const { return !is_columnar && (!has_id && ids_all_na); }
bool qualify_for_optional_id() const { return ids_match && ids_all_na && !ids_part_na; }
bool provided_ids_valid() const { return has_id && ids_match && !ids_all_na && !ids_part_na; }
+ bool is_empty_component() const { return !has_id && ids_all_na; }
bool is_independent() const { return qualify_for_optional_id() || provided_ids_valid(); }
Idx get_n_elements() const {
auto const prov_n_elements = uniform ? elements_ps_in_update : elements_in_base;
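For reference, a minimal sketch (not part of the commit) of the flags these predicates combine; the real UpdateCompProperties has more members (e.g. no_id_col(), get_n_elements()), and the comments are an interpretation rather than documentation from the source:

struct UpdateCompPropertiesSketch {
    bool is_columnar{false}; // update buffer is columnar rather than row based
    bool has_id{false};      // an id attribute/column is present in the update data
    bool ids_all_na{true};   // every provided id is NA
    bool ids_part_na{false}; // some, but not all, ids are NA
    bool ids_match{true};    // ids are identical across scenarios
    bool uniform{true};      // every scenario updates the same number of elements
    long elements_ps_in_update{0};
    long elements_in_base{0};

    bool no_id_row() const { return !is_columnar && (!has_id && ids_all_na); }
    bool qualify_for_optional_id() const { return ids_match && ids_all_na && !ids_part_na; }
    bool provided_ids_valid() const { return has_id && ids_match && !ids_all_na && !ids_part_na; }
    bool is_empty_component() const { return !has_id && ids_all_na; }
    bool is_independent() const { return qualify_for_optional_id() || provided_ids_valid(); }
};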
@@ -412,15 +413,11 @@ class MainModelImpl<ExtraRetrievableTypes<ExtraRetrievableType...>, ComponentLis
&comp_independence]<typename CT>() -> std::vector<Idx2D> {
// TODO: (jguo) this function could be encapsulated in UpdateCompIndependence in update.hpp
Idx const n_comp_elements = [&comp_independence]() {
- if (!comp_independence.empty()) {
- auto const comp_idx =
+ if (auto const comp_idx =
std::ranges::find_if(comp_independence, [](auto const& comp) { return comp.name == CT::name; });
- if (comp_idx == comp_independence.end()) {
- return na_Idx;
- }
- auto const& comp = *comp_idx;
- if (comp.no_id_col() || comp.no_id_row()) {
- return comp.get_n_elements();
+ comp_idx != comp_independence.end()) {
+ if ((*comp_idx).no_id_col() || (*comp_idx).no_id_row()) {
+ return (*comp_idx).get_n_elements();
}
}
return na_Idx;
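The refactor replaces the empty-check and nested early returns with a C++17 if-with-initializer, so the iterator only exists inside the branch and the not-found case falls through to a single return of na_Idx. A minimal standalone sketch of the pattern with made-up types:

#include <algorithm>
#include <string_view>
#include <vector>

struct CompIndependenceSketch {
    std::string_view name;
    long n_elements;
};
constexpr long na_Idx = -1;

long n_elements_for(std::vector<CompIndependenceSketch> const& comps, std::string_view name) {
    if (auto const it = std::ranges::find_if(comps, [name](auto const& c) { return c.name == name; });
        it != comps.end()) {
        return it->n_elements;
    }
    return na_Idx; // not found (or an empty list): fall through to the single default return
}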
@@ -774,8 +771,8 @@ class MainModelImpl<ExtraRetrievableTypes<ExtraRetrievableType...>, ComponentLis
std::vector<std::vector<bool>> ids_na{};
for (const auto& span : all_spans) {
std::vector<bool> id_na{};
- for (const auto& obj : span) {
- if constexpr (requires { obj.id; }) {
+ if constexpr (requires { span.front().id; }) {
+ for (const auto& obj : span) {
id_na.emplace_back(is_nan(obj.id));
}
}
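Swapping the two lines makes the member check independent of the loop variable: the requires-expression now probes the span's element type via span.front(), so if constexpr can wrap the whole loop. A minimal sketch with illustrative types:

#include <cstdint>
#include <vector>

struct WithId {
    int32_t id;
};
struct WithoutId {
    double value;
};

template <typename Span> std::vector<int32_t> collect_ids(Span const& span) {
    std::vector<int32_t> ids;
    if constexpr (requires { span.front().id; }) { // member check on the element type, no loop variable needed
        for (auto const& obj : span) {
            ids.push_back(obj.id);
        }
    }
    return ids; // stays empty when the element type has no id member
}

// collect_ids(std::vector<WithId>{{1}, {2}}) yields {1, 2}; collect_ids(std::vector<WithoutId>{{0.5}}) yields {}.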
@@ -885,49 +882,52 @@ class MainModelImpl<ExtraRetrievableTypes<ExtraRetrievableType...>, ComponentLis
return run_functor_with_all_types_return_vector(check_each_component);
}

- std::vector<std::pair<std::string, bool>> is_update_independent(ConstDataset const& update_data) {
- std::vector<std::pair<std::string, bool>> result;
+ std::unordered_map<std::string, bool> is_update_independent(ConstDataset const& update_data) {
+ std::unordered_map<std::string, bool> result;

// If the batch size is (0 or) 1, then the update data for this component is 'independent'
if (update_data.batch_size() <= 1) {
- result.emplace_back("all component", true);
+ result["all component"] = true;
return result;
}

auto const all_comp_update_independence = check_components_independence(update_data);
for (auto const& comp : all_comp_update_independence) {
- result.emplace_back(comp.name, comp.is_independent());
+ result[comp.name] = comp.is_independent();
}

return result;
}
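With the return type changed from a vector of pairs to std::unordered_map<std::string, bool>, callers can look up a component's independence flag by name. A minimal sketch of how such a result might be built and consumed; the keys and values are illustrative only:

#include <string>
#include <unordered_map>

std::unordered_map<std::string, bool> is_update_independent_sketch(long batch_size) {
    std::unordered_map<std::string, bool> result;
    if (batch_size <= 1) {
        result["all component"] = true; // a single (or empty) scenario is always independent
        return result;
    }
    result["node"] = true;      // illustrative values only
    result["sym_load"] = false;
    return result;
}

// A caller can then look a component up directly, e.g. result.at("node").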

void validate_update_data_independence(UpdateCompProperties const& comp) const {
- if (!comp.has_id && comp.ids_all_na) {
+ if (comp.is_empty_component()) {
return; // empty dataset is still supported
}
if (comp.elements_in_base < comp.elements_ps_in_update) {
- throw DatasetError("Update data has more elements per scenario than input data for component " + comp.name +
- "!");
+ throw DatasetError(
+ "Dataset error: Update data has more elements per scenario than input data for component " + comp.name +
+ "!");
}
if (comp.ids_part_na) {
- throw DatasetError("Part of the IDs are not valid for component " + comp.name + " in update data!");
+ throw DatasetError("Dataset error: Some IDs are not valid for component " + comp.name + " in update data!");
}
if (!comp.uniform) {
if (comp.is_columnar && !comp.has_id) {
- throw DatasetError("Columnar input data without IDs for component " + comp.name + " is not uniform!");
+ throw DatasetError("Dataset error: Columnar input data without IDs for component " + comp.name +
+ " is not uniform!");
}
if (!comp.is_columnar && comp.ids_all_na) {
- throw DatasetError("Row based input data with all NA IDs for component " + comp.name +
+ throw DatasetError("Dataset error: Row based input data with all NA IDs for component " + comp.name +
" is not uniform!");
}
}
if (comp.elements_in_base != comp.elements_ps_in_update) {
if (comp.is_columnar && !comp.has_id) {
- throw DatasetError("Columnar input data for component " + comp.name +
+ throw DatasetError("Dataset error: Columnar input data for component " + comp.name +
" has different number of elements per scenario in update and input data!");
}
if (!comp.is_columnar && comp.uniform && (comp.has_id && comp.ids_all_na)) {
- throw DatasetError("Row based input data for component " + comp.name +
+ throw DatasetError("Dataset error: Row based input data for component " + comp.name +
" has different number of elements per scenario in update and input data!");
}
}
3 changes: 3 additions & 0 deletions tests/cpp_unit_tests/test_container.cpp
@@ -205,6 +205,9 @@ TEST_CASE("Test component container") {
CHECK(const_container.get_id_by_idx(Idx2D{0, 0}) == 1);
CHECK(const_container.get_id_by_idx(Idx2D{0, 1}) == 11);
CHECK(const_container.get_id_by_idx(Idx2D{0, 2}) == 111);
+ CHECK(const_container.get_id_by_idx(Idx2D{1, 0}) == 2);
+ CHECK(const_container.get_id_by_idx(Idx2D{1, 1}) == 22);
+ CHECK(const_container.get_id_by_idx(Idx2D{2, 0}) == 3);
}
#endif // NDEBUG
}
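The added CHECKs exercise group/position addressing: Idx2D{group, pos} selects the pos-th element of the group-th component type, here with ids 1/11/111, 2/22 and 3. A minimal sketch of that addressing with a stand-in container (not the real Container class):

#include <cassert>
#include <vector>

struct Idx2D {
    long group;
    long pos;
};

// Stand-in for Container::get_id_by_idx: group selects the component type, pos the element within it.
int get_id_by_idx_sketch(std::vector<std::vector<int>> const& groups, Idx2D idx) {
    return groups[idx.group][idx.pos];
}

int main() {
    std::vector<std::vector<int>> const groups{{1, 11, 111}, {2, 22}, {3}};
    assert(get_id_by_idx_sketch(groups, {0, 2}) == 111);
    assert(get_id_by_idx_sketch(groups, {1, 1}) == 22);
    assert(get_id_by_idx_sketch(groups, {2, 0}) == 3);
}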
47 changes: 1 addition & 46 deletions tests/unit/test_power_grid_model.py
@@ -17,7 +17,7 @@
)
from power_grid_model._utils import compatibility_convert_row_columnar_dataset
from power_grid_model.errors import InvalidCalculationMethod, IterationDiverge, PowerGridBatchError, PowerGridError
- from power_grid_model.utils import get_dataset_scenario, json_serialize_to_file
+ from power_grid_model.utils import get_dataset_scenario
from power_grid_model.validation import assert_valid_input_data

from .utils import compare_result
@@ -70,51 +70,6 @@ def input_row():
}


- @pytest.fixture
- def input_col_cpp():
- node = initialize_array(DatasetType.input, ComponentType.node, 2)
- node["id"] = [0, 4]
- node["u_rated"] = [100.0, 100.0]
-
- source = initialize_array(DatasetType.input, ComponentType.source, 1)
- source["id"] = 1
- source["node"] = 0
- source["status"] = 1
- source["u_ref"] = 1.0
- source["sk"] = 1000.0
- source["rx_ratio"] = 0.0
-
- sym_load = initialize_array(DatasetType.input, ComponentType.sym_load, 1)
- sym_load["id"] = 2
- sym_load["node"] = 0
- sym_load["status"] = 1
- sym_load["type"] = 2
- sym_load["p_specified"] = 0.0
- sym_load["q_specified"] = 500.0
-
- line = {}
- line["id"] = np.array([5, 6], dtype=power_grid_meta_data[DatasetType.input][ComponentType.line].dtype["id"])
- line["from_node"] = np.array(
- [0, 4], dtype=power_grid_meta_data[DatasetType.input][ComponentType.line].dtype["from_node"]
- )
- line["to_node"] = np.array(
- [4, 0], dtype=power_grid_meta_data[DatasetType.input][ComponentType.line].dtype["to_node"]
- )
- line["from_status"] = np.array(
- [0, 1], dtype=power_grid_meta_data[DatasetType.input][ComponentType.line].dtype["from_status"]
- )
- line["to_status"] = np.array(
- [1, 0], dtype=power_grid_meta_data[DatasetType.input][ComponentType.line].dtype["to_status"]
- )
-
- return {
- ComponentType.node: node,
- ComponentType.source: source,
- ComponentType.sym_load: sym_load,
- ComponentType.line: line,
- }
-
-
@pytest.fixture
def input_col(input_row):
return compatibility_convert_row_columnar_dataset(
