From 2e259f964ad5447ff6ab73a3d8f906683ba30afe Mon Sep 17 00:00:00 2001
From: Matt Garber
Date: Mon, 14 Aug 2023 10:37:01 -0400
Subject: [PATCH] touched site upload URL

---
 src/handlers/site_upload/fetch_upload_url.py |  2 +-
 src/handlers/site_upload/powerset_merge.py   |  2 +-
 tests/site_upload/test_fetch_upload_url.py   | 11 ++++++++++-
 3 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/src/handlers/site_upload/fetch_upload_url.py b/src/handlers/site_upload/fetch_upload_url.py
index fc34502..a1d95d6 100644
--- a/src/handlers/site_upload/fetch_upload_url.py
+++ b/src/handlers/site_upload/fetch_upload_url.py
@@ -58,6 +58,6 @@ def upload_url_handler(event, context):
     res = create_presigned_post(
         os.environ.get("BUCKET_NAME"),
         f"{BucketPath.UPLOAD.value}/{body['study']}/{body['data_package']}/"
-        f"{int(version):03d}/{metadata_db[user]['path']}/{body['filename']}",
+        f"{metadata_db[user]['path']}/{int(version):03d}/{body['filename']}",
     )
     return res
diff --git a/src/handlers/site_upload/powerset_merge.py b/src/handlers/site_upload/powerset_merge.py
index b3f828f..c754200 100644
--- a/src/handlers/site_upload/powerset_merge.py
+++ b/src/handlers/site_upload/powerset_merge.py
@@ -46,8 +46,8 @@ def __init__(self, event):
         self.data_package = s3_key_array[2].split("__")[1]
         self.site = s3_key_array[3]
         self.version = s3_key_array[4]
-
         self.metadata = read_metadata(self.s3_client, self.s3_bucket_name)
+        print(s3_key_array)
 
     # S3 Filesystem operations
     def get_data_package_list(self, path) -> list:
diff --git a/tests/site_upload/test_fetch_upload_url.py b/tests/site_upload/test_fetch_upload_url.py
index d9f0ed1..766c57b 100644
--- a/tests/site_upload/test_fetch_upload_url.py
+++ b/tests/site_upload/test_fetch_upload_url.py
@@ -5,9 +5,11 @@
 import boto3
 import pytest
 
+from src.handlers.shared.enums import BucketPath
 from src.handlers.site_upload.fetch_upload_url import upload_url_handler
 from tests.utils import (
     EXISTING_DATA_P,
+    EXISTING_SITE,
     EXISTING_STUDY,
     EXISTING_VERSION,
     TEST_BUCKET,
@@ -36,9 +38,16 @@ def test_fetch_upload_url(body, status, mock_bucket):
             "principalId": "ppth",
         }
     }
+
     response = upload_url_handler(
         {"body": json.dumps(body), "requestContext": context}, None
     )
-    print(response)
     assert response["statusCode"] == status
+    if response["statusCode"] == 200:
+        res_body = json.loads(response["body"])
+        assert res_body["fields"]["key"] == (
+            f"{BucketPath.UPLOAD.value}/{body['study']}/{body['data_package']}/"
+            f"{EXISTING_SITE}/{body['data_package_version']}/encounter.parquet"
+        )
+        assert "Access-Control-Allow-Origin" not in response["headers"]
 