From 091019c2dd214219bfe644791632636e5ab4215c Mon Sep 17 00:00:00 2001 From: Sujan Adhikari <109404840+Sujanadh@users.noreply.github.com> Date: Mon, 29 Jul 2024 22:26:04 +0545 Subject: [PATCH] feat: endpoint to create dataset and bulk upload entities (#276) * feat: added endpoint to create dataset and bulk upload entities * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * refactor: rename OdkEntity class --> OdkDataset for clarity BREAKING CHANGE to OdkCentral usage * build: update test odk central --> v2024.1.0 * build: ignore server_name for test proxy (else warnings) * refactor: remove entity registration form to archived xlsforms * docs: add note about latest state of entity creation apis * fix: large refactor to OdkCentralAsync, raise errors when required * test: update entity tests to support latest bulk entity upload api --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: spwoodcock --- docker-compose.yml | 2 +- docs/about/odk-entities.md | 3 + docs/api/OdkCentral.md | 5 + docs/api/OdkCentralAsync.md | 2 +- nginx/odk.conf | 2 +- osm_fieldwork/OdkCentral.py | 6 +- osm_fieldwork/OdkCentralAsync.py | 206 ++++++++++++++---- osm_fieldwork/xlsforms/__init__.py | 4 +- .../{ => archived}/entities_registration.xls | Bin .../xlsforms/entities_registration.xml | 1 - pyproject.toml | 2 +- tests/conftest.py | 75 ++++--- tests/test_entities.py | 113 +++++----- 13 files changed, 285 insertions(+), 136 deletions(-) rename osm_fieldwork/xlsforms/{ => archived}/entities_registration.xls (100%) delete mode 100644 osm_fieldwork/xlsforms/entities_registration.xml diff --git a/docker-compose.yml b/docker-compose.yml index 5234bfe8a..00192689f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -68,7 +68,7 @@ services: restart: "unless-stopped" central: - image: "ghcr.io/hotosm/fmtm/odkcentral:v2023.5.0" + image: "ghcr.io/hotosm/fmtm/odkcentral:v2024.1.0" 
depends_on: central-db: condition: service_healthy diff --git a/docs/about/odk-entities.md b/docs/about/odk-entities.md index 5377e117b..dfd5bae30 100644 --- a/docs/about/odk-entities.md +++ b/docs/about/odk-entities.md @@ -33,6 +33,9 @@ use the Entities to achieve the above goals. ### Workflow Using Entities +> **UPDATE 29/07/2024** ODK Central now supports creating the Entity List +> / Dataset via API instead of registration form. + The basic workflow would probably resemble: - Create an Entity registration form. diff --git a/docs/api/OdkCentral.md b/docs/api/OdkCentral.md index 3f504dab0..94679aea1 100644 --- a/docs/api/OdkCentral.md +++ b/docs/api/OdkCentral.md @@ -24,3 +24,8 @@ heading_level: 3 options: show_source: false heading_level: 3 + +::: osm_fieldwork.OdkCentral.OdkDataset +options: +show_source: false +heading_level: 3 diff --git a/docs/api/OdkCentralAsync.md b/docs/api/OdkCentralAsync.md index da9c68b01..2f9528c3d 100644 --- a/docs/api/OdkCentralAsync.md +++ b/docs/api/OdkCentralAsync.md @@ -10,7 +10,7 @@ options: show_source: false heading_level: 3 -::: osm_fieldwork.OdkCentralAsync.OdkEntity +::: osm_fieldwork.OdkCentralAsync.OdkDataset options: show_source: false heading_level: 3 diff --git a/nginx/odk.conf b/nginx/odk.conf index 2a206ee52..00b42b67d 100644 --- a/nginx/odk.conf +++ b/nginx/odk.conf @@ -63,7 +63,7 @@ server { server { listen 443 ssl; - server_name odk.fmtm.localhost; + server_name _; ssl_certificate /etc/nginx/central-fullchain.crt; ssl_certificate_key /etc/nginx/central.key; diff --git a/osm_fieldwork/OdkCentral.py b/osm_fieldwork/OdkCentral.py index 0c8341d4e..b3c30aa59 100755 --- a/osm_fieldwork/OdkCentral.py +++ b/osm_fieldwork/OdkCentral.py @@ -1358,7 +1358,7 @@ def createQRCode( return self.qrcode -class OdkEntity(OdkCentral): +class OdkDataset(OdkCentral): """Class to manipulate a Entity on an ODK Central server.""" def __init__( @@ -1373,7 +1373,7 @@ def __init__( passwd (str): The user's account password on ODK Central. 
Returns: - (OdkEntity): An instance of this object. + (OdkDataset): An instance of this object. """ super().__init__(url, user, passwd) self.name = None @@ -1404,6 +1404,8 @@ def listDatasets( result = self.session.get(url, verify=self.verify) return result.json() + # TODO add createDataset + def listEntities( self, projectId: int, diff --git a/osm_fieldwork/OdkCentralAsync.py b/osm_fieldwork/OdkCentralAsync.py index 8290305ae..75749860b 100755 --- a/osm_fieldwork/OdkCentralAsync.py +++ b/osm_fieldwork/OdkCentralAsync.py @@ -20,7 +20,7 @@ import logging import os from asyncio import gather -from typing import Optional +from typing import Any, Optional, TypedDict from uuid import uuid4 import aiohttp @@ -28,7 +28,16 @@ log = logging.getLogger(__name__) +class EntityIn(TypedDict): + """Required format for Entity uploads to ODK Central.""" + + label: str + data: dict[str, Any] + + class OdkCentral(object): + """Helper methods for ODK Central API.""" + def __init__( self, url: Optional[str] = None, @@ -141,8 +150,9 @@ async def listForms(self, projectId: int, metadata: bool = False): self.forms = await response.json() return self.forms except aiohttp.ClientError as e: - log.error(f"Error fetching forms: {e}") - return [] + msg = f"Error fetching forms: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e async def listSubmissions(self, projectId: int, xform: str, filters: dict = None): """Fetch a list of submission instances for a given form. 
@@ -167,8 +177,9 @@ async def listSubmissions(self, projectId: int, xform: str, filters: dict = None async with self.session.get(url, params=filters, ssl=self.verify) as response: return await response.json() except aiohttp.ClientError as e: - log.error(f"Error fetching submissions: {e}") - return {} + msg = f"Error fetching submissions: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e async def getAllProjectSubmissions(self, projectId: int, xforms: list = None, filters: dict = None): """Fetch a list of submissions in a project on an ODK Central server. @@ -196,7 +207,7 @@ async def getAllProjectSubmissions(self, projectId: int, xforms: list = None, fi return submission_data -class OdkEntity(OdkCentral): +class OdkDataset(OdkCentral): """Class to manipulate a Entity on an ODK Central server.""" def __init__( @@ -211,7 +222,7 @@ def __init__( passwd (str): The user's account password on ODK Central. Returns: - (OdkEntity): An instance of this object. + (OdkDataset): An instance of this object. """ super().__init__(url, user, passwd) @@ -242,8 +253,108 @@ async def listDatasets( async with self.session.get(url, ssl=self.verify) as response: return await response.json() except aiohttp.ClientError as e: - log.error(f"Error fetching datasets: {e}") - return [] + msg = f"Error fetching datasets: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e + + async def createDataset( + self, + projectId: int, + datasetName: Optional[str] = "features", + properties: Optional[list[str]] = [], + ): + """Creates a dataset for a given project. + + Args: + projectId (int): The ID of the project to create the dataset for. + datasetName (str): The name of the dataset to be created. + properties (list[str]): List of property names to create. + Alternatively call createDatasetProperty for each property manually. + + Returns: + dict: The JSON response containing information about the created dataset. 
+ + Raises: + aiohttp.ClientError: If an error occurs during the dataset creation process. + """ + # Create the dataset + url = f"{self.base}projects/{projectId}/datasets" + payload = {"name": datasetName} + try: + log.info(f"Creating dataset ({datasetName}) for project ({projectId})") + async with self.session.post( + url, + ssl=self.verify, + json=payload, + ) as response: + if response.status not in (200, 201): + error_message = await response.text() + log.error(f"Failed to create Dataset: {error_message}") + log.info(f"Successfully created Dataset {datasetName}") + dataset = await response.json() + except aiohttp.ClientError as e: + msg = f"Failed to create Dataset: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e + + if not properties: + return dataset + + # Add the properties, if specified + # FIXME this is a bit of a hack until ODK Central has better support + # FIXME for adding dataset properties in bulk + try: + log.debug(f"Creating properties for dataset ({datasetName}): {properties}") + properties_tasks = [self.createDatasetProperty(projectId, field, datasetName) for field in properties] + success = await gather(*properties_tasks, return_exceptions=True) # type: ignore + if not success: + log.warning(f"No properties were uploaded for ODK project ({projectId}) dataset name ({datasetName})") + log.info(f"Successfully created properties for dataset ({datasetName})") + except aiohttp.ClientError as e: + msg = f"Failed to create properties: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e + + # Manually append to prevent another API call + dataset["properties"] = properties + return dataset + + async def createDatasetProperty( + self, + projectId: int, + field_name: str, + datasetName: Optional[str] = "features", + ): + """Create a property for a dataset. + + Args: + projectId (int): The ID of the project. + datasetName (str): The name of the dataset. + field_name (str): The name of the dataset property to create. 
+ + Returns: + dict: The response data from the API. + + Raises: + aiohttp.ClientError: If an error occurs during the API request. + """ + url = f"{self.base}projects/{projectId}/datasets/{datasetName}/properties" + payload = { + "name": field_name, + } + + try: + log.debug(f"Creating property of dataset {datasetName}") + async with self.session.post(url, ssl=self.verify, json=payload) as response: + response_data = await response.json() + if response.status not in (200, 201): + log.debug(f"Failed to create properties: {response.status}, message='{response_data}'") + log.debug(f"Successfully created properties for dataset {datasetName}") + return response_data + except aiohttp.ClientError as e: + msg = f"Failed to create properties: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e async def listEntities( self, @@ -288,8 +399,9 @@ async def listEntities( async with self.session.get(url, ssl=self.verify) as response: return await response.json() except aiohttp.ClientError as e: - log.error(f"Error fetching entities: {e}") - return [] + msg = f"Error fetching entities: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e async def getEntity( self, @@ -341,6 +453,7 @@ async def getEntity( async with self.session.get(url, ssl=self.verify) as response: return await response.json() except aiohttp.ClientError as e: + # NOTE skip raising exception on HTTP 404 (not found) log.error(f"Error fetching entity: {e}") return {} @@ -424,44 +537,47 @@ async def createEntity( ) as response: return await response.json() except aiohttp.ClientError as e: - log.error(f"Failed to create Entity: {e}") - return {} + msg = f"Failed to create Entity: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e async def createEntities( self, projectId: int, datasetName: str, - labelDataDict: dict, - ) -> list: + entities: list[EntityIn], + ) -> dict: """Bulk create Entities in a project dataset (entity list). 
- NOTE this endpoint will be redundant after Central 2024.01 release. - Args: projectId (int): The ID of the project on ODK Central. datasetName (int): The name of a dataset, specific to a project. - labelDataDict (dict): Mapping of Entity label:data (str:dict) to insert. + entities (list[EntityIn]): A list of Entities to insert. + Format: {"label": "John Doe", "data": {"firstName": "John", "age": "22"}} Returns: - list: A list of Entity detail JSONs. - The 'uuid' field includes the unique entity identifier. + # list: A list of Entity detail JSONs. + # The 'uuid' field includes the unique entity identifier. + dict: {'success': true} + When creating bulk entities ODK Central return this for now. """ - log.info(f"Bulk uploading Entities for project ({projectId}) dataset ({datasetName})") - entity_data = [] - - entity_tasks = [self.createEntity(projectId, datasetName, label, data) for label, data in labelDataDict.items()] - log.info(f"Creating ({len(entity_tasks)}) entities for project " f"({projectId}) dataset ({datasetName})") - entities = await gather(*entity_tasks, return_exceptions=True) + # Validation + if not isinstance(entities, list): + raise ValueError("Entities must be a list") - for entity in entities: - if not entity or isinstance(entity, Exception): - continue - entity_data.append(entity) - - if not entities: - log.warning(f"No entities were uploaded for ODK project ({projectId}) dataset name ({datasetName})") + log.info(f"Bulk uploading ({len(entities)}) Entities for project ({projectId}) dataset ({datasetName})") + url = f"{self.base}projects/{projectId}/datasets/{datasetName}/entities" + payload = {"entities": entities, "source": {"name": "features.csv"}} - return entity_data + try: + async with self.session.post(url, ssl=self.verify, json=payload) as response: + response.raise_for_status() + log.info(f"Successfully created entities for project ({projectId}) in dataset ({datasetName})") + return await response.json() + except aiohttp.ClientError as e: 
+ msg = f"Failed to create Entities: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e async def updateEntity( self, @@ -554,8 +670,9 @@ async def updateEntity( ) as response: return await response.json() except aiohttp.ClientError as e: - log.error(f"Failed to update Entity: {e}") - return {} + msg = f"Failed to update Entity: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e async def deleteEntity( self, @@ -585,8 +702,9 @@ async def deleteEntity( log.debug(f"Server returned deletion unsuccessful: {response_msg}") return success except aiohttp.ClientError as e: - log.error(f"Failed to delete Entity: {e}") - return False + msg = f"Failed to delete Entity: {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e async def getEntityCount( self, @@ -608,11 +726,12 @@ async def getEntityCount( async with self.session.get(url, ssl=self.verify) as response: count = (await response.json()).get("@odata.count", None) except aiohttp.ClientError as e: - log.error(f"Failed to get Entity count for project ({projectId}): {e}") - return 0 + msg = f"Failed to get Entity count for project ({projectId}): {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e if count is None: - log.debug(f"Failed to get Entity count for project ({projectId}) " f"dataset ({datasetName})") + log.debug(f"Project ({projectId}) has no Entities in dataset ({datasetName})") return 0 return count @@ -715,5 +834,6 @@ async def getEntityData( return response_json.get("value", []) return response_json except aiohttp.ClientError as e: - log.error(f"Failed to get Entity data: {e}") - return {} + msg = f"Failed to get Entity data for project ({projectId}): {e}" + log.error(msg) + raise aiohttp.ClientError(msg) from e diff --git a/osm_fieldwork/xlsforms/__init__.py b/osm_fieldwork/xlsforms/__init__.py index 83b6e4a58..3900723ed 100644 --- a/osm_fieldwork/xlsforms/__init__.py +++ b/osm_fieldwork/xlsforms/__init__.py @@ -4,7 +4,5 @@ xlsforms_path = 
os.path.dirname(os.path.abspath(__file__)) -entities_registration = f"{xlsforms_path}/entities_registration.xls" -entities_registration_xml = f"{xlsforms_path}/entities_registration.xml" - buildings = f"{xlsforms_path}/buildings.xls" +healthcare = f"{xlsforms_path}/health.xls" diff --git a/osm_fieldwork/xlsforms/entities_registration.xls b/osm_fieldwork/xlsforms/archived/entities_registration.xls similarity index 100% rename from osm_fieldwork/xlsforms/entities_registration.xls rename to osm_fieldwork/xlsforms/archived/entities_registration.xls diff --git a/osm_fieldwork/xlsforms/entities_registration.xml b/osm_fieldwork/xlsforms/entities_registration.xml deleted file mode 100644 index 362d88d46..000000000 --- a/osm_fieldwork/xlsforms/entities_registration.xml +++ /dev/null @@ -1 +0,0 @@ -Entity Creation Form \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 1b0e4f1ab..06d534dca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -117,7 +117,7 @@ exclude = [ [tool.ruff.lint] select = ["I", "E", "W", "D", "B", "F", "N", "Q"] ignore = ["N805", "B008"] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "osm_fieldwork/basemapper.py" = ["N802"] [tool.ruff.lint.pydocstyle] convention = "google" diff --git a/tests/conftest.py b/tests/conftest.py index 73bda10f4..4190563d3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -25,8 +25,7 @@ import pytest from osm_fieldwork.OdkCentral import OdkAppUser, OdkForm, OdkProject -from osm_fieldwork.OdkCentralAsync import OdkEntity -from osm_fieldwork.xlsforms import entities_registration_xml +from osm_fieldwork.OdkCentralAsync import OdkDataset logging.basicConfig( level="DEBUG", @@ -155,51 +154,69 @@ def odk_form_cleanup(odk_form): assert success -# NOTE this is session scoped as odk_entity_cleanup depends on it @pytest.fixture(scope="session") -def odk_entity(project_details) -> tuple: - """Get entity for a project.""" +async def odk_dataset(project_details) -> tuple: + """Get 
dataset (entity list) for a project.""" odk_id = project_details.get("id") - entity = OdkEntity( + dataset = OdkDataset( url="https://proxy", user="test@hotosm.org", passwd="Password1234", ) - return odk_id, entity + # Create the dataset + async with dataset as odk_dataset: + created_dataset = await odk_dataset.createDataset( + odk_id, + "features", + [ + "geometry", + "project_id", + "task_id", + "osm_id", + "tags", + "version", + "changeset", + "timestamp", + "status", + ], + ) + assert created_dataset.get("name") == "features" + assert sorted(created_dataset.get("properties", [])) == sorted( + [ + "geometry", + "project_id", + "task_id", + "osm_id", + "tags", + "version", + "changeset", + "timestamp", + "status", + ] + ) + + return odk_id, dataset -# NOTE this is session scoped to avoid attempting to create duplicate form -@pytest.fixture(scope="session") -async def odk_entity_cleanup(odk_entity): - """Get Entity for project, with automatic cleanup after.""" - odk_id, entity = odk_entity - # Create entity registration form - form = OdkForm( - entity.url, - entity.user, - entity.passwd, - ) - form_name = form.createForm(odk_id, str(entities_registration_xml), publish=True) - if not form_name: - raise AssertionError("Failed to create form") +@pytest.fixture(scope="session") +async def odk_dataset_cleanup(odk_dataset): + """Get Dataset for project, with automatic cleanup after.""" + odk_id, dataset = odk_dataset dataset_name = "features" - async with entity: - entity_json = await entity.createEntity(odk_id, dataset_name, "test entity", {"osm_id": "1", "geometry": "test"}) + async with dataset as odk_dataset: + entity_json = await odk_dataset.createEntity(odk_id, dataset_name, "test entity", {"osm_id": "1", "geometry": "test"}) entity_uuid = entity_json.get("uuid") # Before yield is used in tests - yield odk_id, dataset_name, entity_uuid, entity + yield odk_id, dataset_name, entity_uuid, dataset # After yield is test cleanup - async with entity: - entity_deleted = 
await entity.deleteEntity(odk_id, dataset_name, entity_uuid) + async with dataset as odk_dataset: + entity_deleted = await odk_dataset.deleteEntity(odk_id, dataset_name, entity_uuid) assert entity_deleted - form_deleted = form.deleteForm(odk_id, form_name) - assert form_deleted - @pytest.fixture(scope="session", autouse=True) def cleanup(): diff --git a/tests/test_entities.py b/tests/test_entities.py index 527a3cdba..72d7b2328 100644 --- a/tests/test_entities.py +++ b/tests/test_entities.py @@ -20,11 +20,13 @@ from datetime import datetime, timezone import pytest +from aiohttp.client_exceptions import ClientError -async def test_entity_modify(odk_entity_cleanup): +async def test_entity_modify(odk_dataset_cleanup): """Test modifying an entity.""" - odk_id, dataset_name, entity_uuid, entity = odk_entity_cleanup + odk_id, dataset_name, entity_uuid, entity = odk_dataset_cleanup + print(dataset_name) async with entity: updated_entity = await entity.updateEntity(odk_id, dataset_name, entity_uuid, label="new label") assert updated_entity.get("currentVersion").get("label") == "new label" @@ -38,66 +40,74 @@ async def test_entity_modify(odk_entity_cleanup): assert new_data.get("project_id") == "100" -async def test_create_invalid_entities(odk_entity_cleanup): +async def test_create_invalid_entities(odk_dataset_cleanup): """Test uploading invalid data to an entity (HTTP 400).""" - odk_id, dataset_name, entity_uuid, entity = odk_entity_cleanup + odk_id, dataset_name, entity_uuid, entity = odk_dataset_cleanup async with entity: # NOTE entity must have a geometry data field with pytest.raises(ValueError): await entity.createEntity(odk_id, dataset_name, label="test", data={"status": 0}) - # NOTE data fields cannot be integer, this should 400 response - invalid_data_type = await entity.createEntity(odk_id, dataset_name, label="test", data={"geometry": "", "status": 0}) - assert invalid_data_type == {} - - bulk_entities_one_invaid = await entity.createEntities( - odk_id, - 
dataset_name, - { - "test entity 2": {"osm_id": 55, "geometry": "test"}, - "test entity 3": {"osm_id": "66", "geometry": "test"}, - }, - ) - assert len(bulk_entities_one_invaid) == 1 + # NOTE data fields cannot be integer, this should raise error + with pytest.raises(ClientError): + await entity.createEntity(odk_id, dataset_name, label="test", data={"geometry": "", "status": 0}) + + # Also test bulk entity create using integer data + with pytest.raises(ClientError): + await entity.createEntities( + odk_id, + dataset_name, + [ + {"label": "test entity 2", "data": {"osm_id": 55, "geometry": "test"}}, + {"label": "test entity 3", "data": {"osm_id": "66", "geometry": "test"}}, + ], + ) + + # Bulk Entity creation, not a list + with pytest.raises(ValueError): + await entity.createEntities( + odk_id, + dataset_name, + {"label": "test", "data": {}}, + ) -async def test_bulk_create_entity_count(odk_entity_cleanup): +async def test_bulk_create_entity_count(odk_dataset_cleanup): """Test bulk creation of Entities.""" - odk_id, dataset_name, entity_uuid, entity = odk_entity_cleanup + odk_id, dataset_name, entity_uuid, entity = odk_dataset_cleanup async with entity: - created_entities = await entity.createEntities( + await entity.createEntities( odk_id, dataset_name, - { - "test entity 1": {"osm_id": "44", "geometry": "test"}, - "test entity 2": {"osm_id": "55", "geometry": "test"}, - "test entity 3": {"osm_id": "66", "geometry": "test"}, - }, + [ + {"label": "test entity 1", "data": {"osm_id": "44", "geometry": "test"}}, + {"label": "test entity 2", "data": {"osm_id": "55", "geometry": "test"}}, + {"label": "test entity 3", "data": {"osm_id": "66", "geometry": "test"}}, + ], ) entity_count = await entity.getEntityCount(odk_id, dataset_name) - assert created_entities[0].get("currentVersion").get("data").get("geometry") == "test" # NOTE this may be cumulative from the session... 
either 4 or 5 assert entity_count >= 4 -async def test_get_entity_data(odk_entity_cleanup): +async def test_get_entity_data(odk_dataset_cleanup): """Test getting entity data, inluding via a OData filter.""" - odk_id, dataset_name, entity_uuid, entity = odk_entity_cleanup + odk_id, dataset_name, entity_uuid, entity = odk_dataset_cleanup async with entity: - new_entities = await entity.createEntities( + await entity.createEntities( odk_id, dataset_name, - { - "test entity 1": {"geometry": "test"}, - "test entity 2": {"geometry": "test"}, - "test entity 3": {"geometry": "test"}, - "test entity 4": {"geometry": "test"}, - "test entity 5": {"geometry": "test"}, - "test entity 6": {"geometry": "test"}, - "test entity 7": {"geometry": "test"}, - "test entity 8": {"geometry": "test"}, - }, + [ + {"label": "test entity 1", "data": {"geometry": "test"}}, + {"label": "test entity 2", "data": {"geometry": "test"}}, + {"label": "test entity 3", "data": {"geometry": "test"}}, + {"label": "test entity 4", "data": {"geometry": "test"}}, + {"label": "test entity 5", "data": {"geometry": "test"}}, + {"label": "test entity 6", "data": {"geometry": "test"}}, + {"label": "test entity 7", "data": {"geometry": "test"}}, + {"label": "test entity 8", "data": {"geometry": "test"}}, + ], ) all_entities = await entity.getEntityData(odk_id, dataset_name) @@ -113,18 +123,12 @@ async def test_get_entity_data(odk_entity_cleanup): assert filtered_entities.get("@odata.count") >= 9 assert "@odata.nextLink" in filtered_entities.keys() - entity_uuids = [_entity.get("uuid") for _entity in new_entities] - - # Update all entities, so updatedAt is not 'None' - for uuid in entity_uuids: - await entity.updateEntity(odk_id, dataset_name, uuid, data={"status": "READY"}) - # Get current time NOTE time format = 2022-01-31T23:59:59.999Z time_now = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ") - # Update last 3 entities prior to filter - entity_uuids = [_entity.get("uuid") for _entity in 
new_entities] - for uuid in entity_uuids[5:]: + # Update first 5 entities prior to filter + entity_uuids = [_entity.get("__id") for _entity in all_entities] + for uuid in sorted(entity_uuids[:5]): await entity.updateEntity(odk_id, dataset_name, uuid, data={"status": "LOCKED_FOR_MAPPING"}) filter_updated = await entity.getEntityData( @@ -132,13 +136,14 @@ async def test_get_entity_data(odk_entity_cleanup): dataset_name, url_params=f"$filter=__system/updatedAt gt {time_now}", ) - assert len(filter_updated) == 3 - assert filter_updated[0].get("status") == "LOCKED_FOR_MAPPING" + assert len(filter_updated) == 5 + for entity in filter_updated: + assert entity.get("status") == "LOCKED_FOR_MAPPING" -async def test_get_entity_data_select_params(odk_entity_cleanup): +async def test_get_entity_data_select_params(odk_dataset_cleanup): """Test selecting specific param for an Entity.""" - odk_id, dataset_name, entity_uuid, entity = odk_entity_cleanup + odk_id, dataset_name, entity_uuid, entity = odk_dataset_cleanup async with entity: entities_select_params = await entity.getEntityData( odk_id, @@ -153,9 +158,9 @@ async def test_get_entity_data_select_params(odk_entity_cleanup): assert "geometry" in first_entity, "Missing 'geometry' key" -async def test_get_single_entity(odk_entity_cleanup): +async def test_get_single_entity(odk_dataset_cleanup): """Test getting specific Entity by UUID.""" - odk_id, dataset_name, entity_uuid, entity = odk_entity_cleanup + odk_id, dataset_name, entity_uuid, entity = odk_dataset_cleanup async with entity: single_entity = await entity.getEntity( odk_id,