From 4a622be781770605abae5d7dc251beeb15f1311b Mon Sep 17 00:00:00 2001
From: Jan Range
Date: Wed, 29 May 2024 10:56:31 +0200
Subject: [PATCH] fix update not adding new mult compounds

---
 easyDataverse/base.py                    | 12 ++--------
 tests/integration/test_dataset_update.py | 28 +++++++++++++++++++++++-
 2 files changed, 29 insertions(+), 11 deletions(-)

diff --git a/easyDataverse/base.py b/easyDataverse/base.py
index 4d13c0c..e6bcd12 100644
--- a/easyDataverse/base.py
+++ b/easyDataverse/base.py
@@ -212,18 +212,10 @@ def _add_changed_multiples(self):
             if not self._is_multiple(field):
                 continue
 
-            value = getattr(self, name)
-            has_changes = any(value._changed for value in value)
-
-            if has_changes:
-                self._changed.add(name)
+            self._changed.add(name)
 
     def _process_multiple_compound(self, compounds) -> List[Dict]:
-        """Whenever a single compound has changed, return all compounds."""
-
-        if not any(len(compound._changed) for compound in compounds):
-            return []
-
+        """Processes multiple compounds"""
         return [compound.dataverse_dict() for compound in compounds]
 
     def _process_single_compound(self, compound) -> Dict:
diff --git a/tests/integration/test_dataset_update.py b/tests/integration/test_dataset_update.py
index 8670f7a..b2d42e3 100644
--- a/tests/integration/test_dataset_update.py
+++ b/tests/integration/test_dataset_update.py
@@ -13,7 +13,6 @@ def test_dataset_update(
         credentials,
         minimal_upload,
     ):
-
         # Arrange
         base_url, api_token = credentials
         url = f"{base_url}/api/dataverses/root/datasets"
@@ -38,6 +37,11 @@ def test_dataset_update(
         # Fetch the dataset and update the title
        dataset = dataverse.load_dataset(pid)
         dataset.citation.title = "Title has changed"
+
+        # Check if multiple compound changes are tracked too
+        dataset.citation.add_other_id(agency="Software Heritage1", value="softwareid1")
+        dataset.citation.add_other_id(agency="Software Heritage2", value="softwareid2")
+
         dataset.update()
 
         # Re-fetch the dataset
@@ -59,10 +63,32 @@ def test_dataset_update(
             )
         )
 
+        other_id_fields = next(
+            filter(
+                lambda x: x["typeName"] == "otherId",
+                updated_dataset["data"]["metadataBlocks"]["citation"]["fields"],
+            )
+        )["value"]
+
         # Assert
         assert (
             title_field["value"] == "Title has changed"
         ), "The updated dataset title does not match the expected title."
+        assert (
+            len(other_id_fields) == 2
+        ), "The updated dataset does not have the expected number of other ids."
+        assert (
+            other_id_fields[0]["otherIdValue"]["value"] == "softwareid1"
+        ), "The updated dataset does not have the expected other id."
+        assert (
+            other_id_fields[1]["otherIdValue"]["value"] == "softwareid2"
+        ), "The updated dataset does not have the expected other id."
+        assert (
+            other_id_fields[0]["otherIdAgency"]["value"] == "Software Heritage1"
+        ), "The updated dataset does not have the expected other id agency."
+        assert (
+            other_id_fields[1]["otherIdAgency"]["value"] == "Software Heritage2"
+        ), "The updated dataset does not have the expected other id agency."
 
     @staticmethod
     def sort_citation(dataset: Dict):