Skip to content

Commit

Permalink
fixup! Issue #150/#156 integrate DeepGraphSplitter in AggregatorBatchJobs._create_crossbackend_job
Browse files Browse the repository at this point in the history
  • Loading branch information
soxofaan committed Sep 19, 2024
1 parent 1c82648 commit b1c708e
Showing 1 changed file with 32 additions and 0 deletions.
32 changes: 32 additions & 0 deletions tests/partitionedjobs/test_crossbackend.py
Original file line number Diff line number Diff line change
Expand Up @@ -1048,3 +1048,35 @@ def test_triple_split(self):
_SubGraphData(split_node="merge1", node_ids={"bands1", "merge1", "temporal2", "lc2"}, backend_id="b2"),
],
)

@pytest.mark.parametrize(
["primary_backend", "secondary_graph"],
[
("b1", _SubGraphData(split_node="lc2", node_ids={"lc2"}, backend_id="b2")),
("b2", _SubGraphData(split_node="lc1", node_ids={"lc1"}, backend_id="b1")),
],
)
def test_split_with_primary_backend(self, primary_backend, secondary_graph):
"""Test `primary_backend` argument of DeepGraphSplitter"""
splitter = DeepGraphSplitter(
supporting_backends=supporting_backends_from_node_id_dict({"lc1": ["b1"], "lc2": ["b2"]}),
primary_backend=primary_backend,
)
flat = {
# lc1 lc2
# \ /
# merge
"lc1": {"process_id": "load_collection", "arguments": {"id": "S1"}},
"lc2": {"process_id": "load_collection", "arguments": {"id": "S2"}},
"merge": {
"process_id": "merge_cubes",
"arguments": {"cube1": {"from_node": "lc1"}, "cube2": {"from_node": "lc2"}},
"result": True,
},
}
result = splitter.split(flat)
assert result == _PGSplitResult(
primary_node_ids={"lc1", "lc2", "merge"},
primary_backend_id=primary_backend,
secondary_graphs=[secondary_graph],
)

0 comments on commit b1c708e

Please sign in to comment.