PRMP-1122 add attachment to pointer
NogaNHS committed Dec 6, 2024
1 parent c6f35bd commit d530219
Showing 7 changed files with 42 additions and 26 deletions.
2 changes: 2 additions & 0 deletions lambdas/models/nhs_document_reference.py
@@ -29,6 +29,7 @@ def __init__(
sub_folder: str = "",
doc_type: str = "",
uploading: bool = False,
size: int = None,
) -> None:
date_now = datetime.now(timezone.utc)

@@ -47,6 +48,7 @@ def __init__(
self.file_location = self.set_file_location()
self.uploading = uploading
self.last_updated = int(date_now.timestamp())
self.size = size

def set_file_location(self):
file_location = f"s3://{self.s3_bucket_name}"
2 changes: 1 addition & 1 deletion lambdas/models/nrl_sqs_message.py
@@ -6,7 +6,7 @@


class NrlAttachment(BaseModel):
content_type: str = ""
content_type: str = "application/pdf"
language: str = "en-US"
url: str = ""
size: int = 0
5 changes: 5 additions & 0 deletions lambdas/repositories/bulk_upload/bulk_upload_s3_repository.py
@@ -96,3 +96,8 @@ def rollback_transaction(self):

def file_exists_on_staging_bucket(self, file_key: str) -> bool:
return self.s3_repository.file_exist_on_s3(self.staging_bucket_name, file_key)

def file_size_on_lg_bucket(self, file_key: str) -> int:
return self.s3_repository.get_file_size(
s3_bucket_name=self.lg_bucket_name, object_key=file_key
)
4 changes: 4 additions & 0 deletions lambdas/services/base/s3_service.py
@@ -139,3 +139,7 @@ def list_all_objects(self, bucket_name: str) -> list[dict]:
for paginated_result in s3_paginator.paginate(Bucket=bucket_name):
s3_list_objects_result += paginated_result.get("Contents", [])
return s3_list_objects_result

def get_file_size(self, s3_bucket_name: str, object_key: str) -> int:
response = self.client.head_object(Bucket=s3_bucket_name, Key=object_key)
return response.get("ContentLength", 0)
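
For context, the new get_file_size helper relies on S3 HeadObject, which returns an object's metadata, including ContentLength, without downloading the body. A minimal standalone sketch of the same call, with a hypothetical bucket name and object key:

    import boto3

    # Fetch object metadata only; ContentLength is the object's size in bytes.
    # The bucket and key below are placeholders for illustration.
    client = boto3.client("s3")
    response = client.head_object(
        Bucket="example-lg-bucket", Key="9000000009/example-record.pdf"
    )
    print(response.get("ContentLength", 0))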
39 changes: 27 additions & 12 deletions lambdas/services/bulk_upload_service.py
@@ -9,7 +9,7 @@
from enums.upload_status import UploadStatus
from enums.virus_scan_result import VirusScanResult
from models.nhs_document_reference import NHSDocumentReference
from models.nrl_sqs_message import NrlSqsMessage
from models.nrl_sqs_message import NrlAttachment, NrlSqsMessage
from models.staging_metadata import MetadataFile, StagingMetadata
from repositories.bulk_upload.bulk_upload_dynamo_repository import (
BulkUploadDynamoRepository,
@@ -234,7 +234,9 @@ def handle_sqs_message(self, message: dict):
)

try:
self.create_lg_records_and_copy_files(staging_metadata, patient_ods_code)
last_document_processed = self.create_lg_records_and_copy_files(
staging_metadata, patient_ods_code
)
logger.info(
f"Successfully uploaded the Lloyd George records for patient: {staging_metadata.nhs_number}",
{"Result": "Successful upload"},
@@ -271,15 +273,26 @@ def handle_sqs_message(self, message: dict):
accepted_reason,
patient_ods_code,
)

nrl_sqs_message = NrlSqsMessage(
nhs_number=staging_metadata.nhs_number, action=NrlActionTypes.CREATE
)
self.sqs_repository.send_message_to_nrl_fifo(
queue_url=self.nrl_queue_url,
message=nrl_sqs_message,
group_id=f"nrl_sqs_{uuid.uuid4()}",
)
if len(file_names) == 1:
file_size = self.s3_repository.file_size_on_lg_bucket(
last_document_processed.s3_file_key()
)
doc_details = NrlAttachment(
url=last_document_processed.id,
size=file_size,
title=last_document_processed.file_name,
creation=last_document_processed.created,
)
nrl_sqs_message = NrlSqsMessage(
nhs_number=staging_metadata.nhs_number,
action=NrlActionTypes.CREATE,
attachment=doc_details,
)
self.sqs_repository.send_message_to_nrl_fifo(
queue_url=self.nrl_queue_url,
message=nrl_sqs_message,
group_id=f"nrl_sqs_{uuid.uuid4()}",
)

def resolve_source_file_path(self, staging_metadata: StagingMetadata):
sample_file_path = staging_metadata.files[0].file_path
@@ -325,7 +338,7 @@ def create_lg_records_and_copy_files(
self, staging_metadata: StagingMetadata, current_gp_ods: str
):
nhs_number = staging_metadata.nhs_number

document_reference = None
for file_metadata in staging_metadata.files:
document_reference = self.convert_to_document_reference(
file_metadata, nhs_number, current_gp_ods
Expand All @@ -339,6 +352,8 @@ def create_lg_records_and_copy_files(
)
document_reference.set_uploaded_to_true()
self.dynamo_repository.create_record_in_lg_dynamo_table(document_reference)
# Return the last document reference until stitching-as-default is implemented
return document_reference

def rollback_transaction(self):
try:
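
For illustration, a rough sketch of the message the new single-file branch would assemble before handing it to the NRL FIFO queue. The field names (attachment, url, size, title, creation) are taken from this diff; the literal values are placeholders, and it is assumed the Pydantic models accept them as written:

    from enums.nrl_sqs_upload import NrlActionTypes
    from models.nrl_sqs_message import NrlAttachment, NrlSqsMessage

    # Placeholder values standing in for a single uploaded Lloyd George file.
    attachment = NrlAttachment(
        url="3d8683b9-1665-40d2-8499-6e8302d507ff",  # document reference id
        size=4096,  # bytes, as returned by get_file_size
        title="example-record.pdf",
        creation="2024-12-06T10:00:00Z",
    )
    message = NrlSqsMessage(
        nhs_number="9000000009",
        action=NrlActionTypes.CREATE,
        attachment=attachment,
    )
    # The SQS repository serialises the model with model_dump_json() before
    # sending, per the updated repository test in this commit.
    print(message.model_dump_json())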
@@ -55,7 +55,7 @@ def test_send_message_to_nrl_sqs_fifo(set_env, repo_under_test):
)
message_body = TEST_NRL_SQS_MESSAGE
repo_under_test.sqs_repository.send_message_fifo.assert_called_with(
queue_url="https://test-queue.com",
queue_url=NRL_SQS_URL,
message_body=message_body.model_dump_json(),
group_id="123",
)
14 changes: 2 additions & 12 deletions lambdas/tests/unit/services/test_bulk_upload_service.py
@@ -3,12 +3,10 @@

import pytest
from botocore.exceptions import ClientError
from enums.nrl_sqs_upload import NrlActionTypes
from enums.patient_ods_inactive_status import PatientOdsInactiveStatus
from enums.upload_status import UploadStatus
from enums.virus_scan_result import SCAN_RESULT_TAG_KEY, VirusScanResult
from freezegun import freeze_time
from models.nrl_sqs_message import NrlSqsMessage
from models.pds_models import Patient
from repositories.bulk_upload.bulk_upload_s3_repository import BulkUploadS3Repository
from repositories.bulk_upload.bulk_upload_sqs_repository import BulkUploadSqsRepository
@@ -203,9 +201,7 @@ def test_handle_sqs_message_happy_path(
mock_ods_validation,
):
TEST_STAGING_METADATA.retries = 0
mock_nrl_message = NrlSqsMessage(
nhs_number=TEST_STAGING_METADATA.nhs_number, action=NrlActionTypes.CREATE
)

mock_create_lg_records_and_copy_files = mocker.patch.object(
BulkUploadService, "create_lg_records_and_copy_files"
)
@@ -223,11 +219,7 @@
)
mock_report_upload_complete.assert_called()
mock_remove_ingested_file_from_source_bucket.assert_called()
repo_under_test.sqs_repository.send_message_to_nrl_fifo.assert_called_with(
queue_url="https://test-queue.com",
message=mock_nrl_message,
group_id=f"nrl_sqs_{mock_uuid}",
)
repo_under_test.sqs_repository.send_message_to_nrl_fifo.assert_not_called()


def set_up_mocks_for_non_ascii_files(
@@ -483,7 +475,6 @@ def test_handle_sqs_message_calls_report_upload_successful_when_patient_is_forma
"Patient is deceased - FORMAL",
PatientOdsInactiveStatus.DECEASED,
)
repo_under_test.sqs_repository.send_message_to_nrl_fifo.assert_called()


def test_handle_sqs_message_calls_report_upload_successful_when_patient_is_informally_deceased_and_historical(
Expand Down Expand Up @@ -521,7 +512,6 @@ def test_handle_sqs_message_calls_report_upload_successful_when_patient_is_infor
"Patient matched on historical name, Patient is deceased - INFORMAL",
"Y12345",
)
repo_under_test.sqs_repository.send_message_to_nrl_fifo.assert_called()


def test_handle_sqs_message_calls_report_upload_successful_when_patient_has_historical_name_and_rest(
