Merge branch 'main' into PRMP-1188
oliverbeumkes-nhs authored Dec 13, 2024
2 parents 9a8af85 + 8bdeea0 commit c34ad77
Showing 17 changed files with 189 additions and 22 deletions.
@@ -42,7 +42,7 @@ function DeleteResultStage({ numberOfFiles, setDownloadStage }: Props) {
</Card.Content>
</Card>
<p>You can no longer access this record using our storage.</p>
<h2>What happens next</h2>
<h2 className="nhsuk-heading-l">What happens next</h2>
<ol>
<li>
Make sure you safely store the paper version of this record, as it may be the
@@ -56,13 +56,13 @@ function DeleteResultStage({ numberOfFiles, setDownloadStage }: Props) {
If you think you’ve made a mistake, you will need to upload this record again
</li>
</ol>
<h3>Your responsibilities after removing this record</h3>
<h3 className="nhsuk-heading-m">Your responsibilities after removing this record</h3>
<p>
Everyone in a health and care organisation is responsible for managing records
appropriately. It is important all general practice staff understand their
responsibilities for creating, and disposing of records appropriately.
</p>
<h3>Follow the Record Management Code of Practice</h3>
<h3 className="nhsuk-heading-m">Follow the Record Management Code of Practice</h3>
<p>
The{' '}
<a
@@ -130,13 +130,13 @@ function LloydGeorgeDownloadComplete({
</ol>
</>
)}
<h2>Your responsibilities with this record</h2>
<h2 className="nhsuk-heading-l">Your responsibilities with this record</h2>
<p>
Everyone in a health and care organisation is responsible for managing records
appropriately. It is important all general practice staff understand their
responsibilities for creating, maintaining, and disposing of records appropriately.
</p>
<h3>Follow the Record Management Code of Practice</h3>
<h3 className="nhsuk-heading-m">Follow the Record Management Code of Practice</h3>
<p>
The{' '}
<a href="https://transform.england.nhs.uk/information-governance/guidance/records-management-code">
2 changes: 1 addition & 1 deletion lambdas/handlers/manage_nrl_pointer_handler.py
@@ -45,7 +45,7 @@ def lambda_handler(event, context):
f"Processing SQS message for nhs number: {nrl_message.nhs_number}"
)
nrl_verified_message = nrl_message.model_dump(
by_alias=True, exclude_none=True, exclude_defaults=True
by_alias=True, exclude_none=True
)
match nrl_message.action:
case NrlActionTypes.CREATE:
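
The only change here is dropping exclude_defaults=True, so fields that are left at their model defaults (such as the new SNOMED code defaults below) now survive serialization. A minimal sketch of the difference, using a simplified stand-in model rather than the real NrlSqsMessage:

# Simplified stand-in model (not the production NrlSqsMessage) illustrating the
# effect of exclude_defaults on model_dump output.
from typing import Optional

from pydantic import AliasGenerator, BaseModel, ConfigDict
from pydantic.alias_generators import to_camel


class DemoMessage(BaseModel):
    model_config = ConfigDict(
        alias_generator=AliasGenerator(serialization_alias=to_camel)
    )

    nhs_number: str
    snomed_code_doc_type: str = "16521000000101"  # hypothetical default value
    attachment: Optional[dict] = None


msg = DemoMessage(nhs_number="9000000009")

print(msg.model_dump(by_alias=True, exclude_none=True))
# {'nhsNumber': '9000000009', 'snomedCodeDocType': '16521000000101'}

print(msg.model_dump(by_alias=True, exclude_none=True, exclude_defaults=True))
# {'nhsNumber': '9000000009'}  <- defaulted fields stripped, which the handler no longer wants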
2 changes: 1 addition & 1 deletion lambdas/models/nrl_fhir_document_reference.py
@@ -13,7 +13,7 @@ class FhirDocumentReference(BaseModel):
snomed_code_doc_type: str = "None"
snomed_code_category: str = "None"
snomed_code_category_display: str = "Care plan"
attachment: Optional[NrlAttachment] = {}
attachment: Optional[NrlAttachment] = NrlAttachment()

def build_fhir_dict(self):
snomed_url = "http://snomed.info/sct"
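
Swapping the {} default for NrlAttachment() matters because Pydantic does not validate field defaults unless asked to, so the old default left attachment as a bare dict rather than a model instance. A small illustrative sketch (field names simplified, not the production models):

# Illustrative only: why a {} default is not equivalent to a model-instance default.
from typing import Optional

from pydantic import BaseModel


class Attachment(BaseModel):
    url: str = ""
    content_type: str = "application/pdf"


class BadRef(BaseModel):
    attachment: Optional[Attachment] = {}  # default is never validated, so it stays a plain dict


class GoodRef(BaseModel):
    attachment: Optional[Attachment] = Attachment()


print(type(BadRef().attachment))   # <class 'dict'>  -> .url would raise AttributeError
print(type(GoodRef().attachment))  # <class '__main__.Attachment'> -> .url is safely ""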
9 changes: 5 additions & 4 deletions lambdas/models/nrl_sqs_message.py
@@ -1,12 +1,13 @@
from typing import Optional

from enums.snomed_codes import SnomedCodesCategory, SnomedCodesType
from pydantic import AliasGenerator, BaseModel, ConfigDict
from pydantic.alias_generators import to_camel


class NrlAttachment(BaseModel):
content_type: str = ""
language: str = "en-US"
content_type: str = "application/pdf"
language: str = "en-UK"
url: str = ""
size: int = 0
hash: str = ""
@@ -20,8 +21,8 @@ class NrlSqsMessage(BaseModel):
)

nhs_number: str
snomed_code_doc_type: str
snomed_code_category: str
snomed_code_doc_type: str = SnomedCodesType.LLOYD_GEORGE
snomed_code_category: str = SnomedCodesCategory.CARE_PLAN
description: str = ""
attachment: Optional[NrlAttachment] = None
action: str
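
With these defaults, callers only need to supply nhs_number and action; the SNOMED document type and category now fall back to the Lloyd George and care plan codes from enums.snomed_codes. A hedged usage sketch (the URL is a placeholder):

# Usage sketch, assuming the model and enums import as shown in this diff.
from enums.nrl_sqs_upload import NrlActionTypes
from models.nrl_sqs_message import NrlAttachment, NrlSqsMessage

message = NrlSqsMessage(
    nhs_number="9000000009",
    action=NrlActionTypes.CREATE,
    attachment=NrlAttachment(url="https://example.org/DocumentReference/some-id"),  # placeholder URL
)

# model_dump_json() is what the SQS repository sends to the NRL FIFO queue.
print(message.model_dump_json())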
11 changes: 11 additions & 0 deletions lambdas/repositories/bulk_upload/bulk_upload_sqs_repository.py
@@ -1,6 +1,7 @@
import os
import uuid

from models.nrl_sqs_message import NrlSqsMessage
from models.staging_metadata import StagingMetadata
from services.base.sqs_service import SQSService
from utils.audit_logging_setup import LoggingService
@@ -38,4 +39,14 @@ def put_sqs_message_back_to_queue(self, sqs_message: dict):
queue_url=self.metadata_queue_url,
message_body=sqs_message["body"],
nhs_number=nhs_number,
group_id=f"back_to_queue_bulk_upload_{uuid.uuid4()}",
)

def send_message_to_nrl_fifo(
self, queue_url: str, message: NrlSqsMessage, group_id: str
):
self.sqs_repository.send_message_fifo(
queue_url=queue_url,
message_body=message.model_dump_json(),
group_id=group_id,
)
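
send_message_to_nrl_fifo delegates to SQSService.send_message_fifo, whose implementation is not part of this diff. For context, a sketch of the kind of boto3 call a FIFO send ultimately needs (the queue URL is a placeholder):

# Hedged sketch of a raw boto3 FIFO send; the real call goes through SQSService.send_message_fifo.
import uuid

import boto3

sqs_client = boto3.client("sqs")

sqs_client.send_message(
    QueueUrl="https://sqs.eu-west-2.amazonaws.com/123456789012/nrl-example-queue.fifo",  # placeholder
    MessageBody='{"nhs_number": "9000000009", "action": "create"}',
    MessageGroupId=f"nrl_sqs_{uuid.uuid4()}",  # FIFO queues require a message group id
    # A MessageDeduplicationId is also required unless the queue has
    # content-based deduplication enabled.
)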
4 changes: 4 additions & 0 deletions lambdas/services/base/s3_service.py
@@ -139,3 +139,7 @@ def list_all_objects(self, bucket_name: str) -> list[dict]:
for paginated_result in s3_paginator.paginate(Bucket=bucket_name):
s3_list_objects_result += paginated_result.get("Contents", [])
return s3_list_objects_result

def get_file_size(self, s3_bucket_name: str, object_key: str) -> int:
response = self.client.head_object(Bucket=s3_bucket_name, Key=object_key)
return response.get("ContentLength", 0)
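
The new helper is a thin wrapper over head_object, which returns object metadata without downloading the body; ContentLength is the size in bytes. For reference, the underlying boto3 call (bucket and key are placeholders):

# Reference sketch of the boto3 call behind get_file_size; bucket and key are placeholders.
import boto3

s3_client = boto3.client("s3")

response = s3_client.head_object(
    Bucket="example-lloyd-george-store",
    Key="9000000009/1of1_example_record.pdf",
)
print(response["ContentLength"])  # integer size of the object in bytes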
4 changes: 3 additions & 1 deletion lambdas/services/bulk_upload_metadata_service.py
@@ -74,7 +74,9 @@ def csv_to_staging_metadata(csv_file_path: str) -> list[StagingMetadata]:
logger.info("Parsing bulk upload metadata")

patients = {}
with open(csv_file_path, mode="r") as csv_file_handler:
with open(
csv_file_path, mode="r", encoding="utf-8", errors="replace"
) as csv_file_handler:
csv_reader: Iterable[dict] = csv.DictReader(csv_file_handler)
for row in csv_reader:
file_metadata = MetadataFile.model_validate(row)
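
Passing encoding="utf-8", errors="replace" means a stray non-UTF-8 byte in the metadata CSV is replaced with U+FFFD instead of aborting the whole parse with a UnicodeDecodeError. A small demonstration (the file name and column headers are made up for the example):

# Demonstration of errors="replace"; the file and column names here are hypothetical.
import csv

raw_bytes = b"FILEPATH,NHS-NO\n/9000000009/1of1_example_record.pdf,9000000009\xff\n"

with open("example_metadata.csv", "wb") as out_file:
    out_file.write(raw_bytes)

with open("example_metadata.csv", mode="r", encoding="utf-8", errors="replace") as handle:
    for row in csv.DictReader(handle):
        print(row)  # the invalid 0xff byte is read as '\ufffd' rather than raising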
30 changes: 28 additions & 2 deletions lambdas/services/bulk_upload_service.py
@@ -4,10 +4,12 @@

import pydantic
from botocore.exceptions import ClientError
from enums.nrl_sqs_upload import NrlActionTypes
from enums.patient_ods_inactive_status import PatientOdsInactiveStatus
from enums.upload_status import UploadStatus
from enums.virus_scan_result import VirusScanResult
from models.nhs_document_reference import NHSDocumentReference
from models.nrl_sqs_message import NrlAttachment, NrlSqsMessage
from models.staging_metadata import MetadataFile, StagingMetadata
from repositories.bulk_upload.bulk_upload_dynamo_repository import (
BulkUploadDynamoRepository,
@@ -52,6 +54,7 @@ def __init__(self):
self.pdf_content_type = "application/pdf"
self.unhandled_messages = []
self.file_path_cache = {}
self.nrl_queue_url = os.environ["NRL_SQS_URL"]

def process_message_queue(self, records: list):
for index, message in enumerate(records, start=1):
@@ -231,7 +234,9 @@ def handle_sqs_message(self, message: dict):
)

try:
self.create_lg_records_and_copy_files(staging_metadata, patient_ods_code)
last_document_processed = self.create_lg_records_and_copy_files(
staging_metadata, patient_ods_code
)
logger.info(
f"Successfully uploaded the Lloyd George records for patient: {staging_metadata.nhs_number}",
{"Result": "Successful upload"},
@@ -268,6 +273,25 @@ def handle_sqs_message(self, message: dict):
accepted_reason,
patient_ods_code,
)
if len(file_names) == 1:
document_api_endpoint = (
os.environ.get("APIM_API_URL", "")
+ "/DocumentReference/"
+ last_document_processed.id
)
doc_details = NrlAttachment(
url=document_api_endpoint,
)
nrl_sqs_message = NrlSqsMessage(
nhs_number=staging_metadata.nhs_number,
action=NrlActionTypes.CREATE,
attachment=doc_details,
)
self.sqs_repository.send_message_to_nrl_fifo(
queue_url=self.nrl_queue_url,
message=nrl_sqs_message,
group_id=f"nrl_sqs_{uuid.uuid4()}",
)

def resolve_source_file_path(self, staging_metadata: StagingMetadata):
sample_file_path = staging_metadata.files[0].file_path
@@ -313,7 +337,7 @@ def create_lg_records_and_copy_files(
self, staging_metadata: StagingMetadata, current_gp_ods: str
):
nhs_number = staging_metadata.nhs_number

document_reference = None
for file_metadata in staging_metadata.files:
document_reference = self.convert_to_document_reference(
file_metadata, nhs_number, current_gp_ods
@@ -327,6 +351,8 @@
)
document_reference.set_uploaded_to_true()
self.dynamo_repository.create_record_in_lg_dynamo_table(document_reference)
# returning last document ref until stitching as default is implemented
return document_reference

def rollback_transaction(self):
try:
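
Condensed, the new post-upload step sends an NRL "create pointer" message only when the patient had a single file, pointing at the DocumentReference of the last (and only) document processed. A rough sketch of that flow with placeholder values (the real logic lives inside handle_sqs_message above):

# Rough sketch of the single-file NRL notification step; ids and URLs are placeholders.
import os
import uuid

from enums.nrl_sqs_upload import NrlActionTypes
from models.nrl_sqs_message import NrlAttachment, NrlSqsMessage

apim_api_url = os.environ.get("APIM_API_URL", "")      # APIM gateway base URL
document_reference_id = "example-document-ref-id"      # id of the last uploaded document

nrl_sqs_message = NrlSqsMessage(
    nhs_number="9000000009",
    action=NrlActionTypes.CREATE,
    attachment=NrlAttachment(
        url=f"{apim_api_url}/DocumentReference/{document_reference_id}",
    ),
)

# Sent to the queue configured in NRL_SQS_URL with a fresh uuid as the group id,
# mirroring the call added to handle_sqs_message.
group_id = f"nrl_sqs_{uuid.uuid4()}"

Multi-file uploads skip this step for now; as the new comment in create_lg_records_and_copy_files notes, only the last document reference is returned until stitching as default is implemented.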
2 changes: 1 addition & 1 deletion lambdas/services/nrl_api_service.py
@@ -47,7 +47,7 @@ def create_new_pointer(self, body, retry_on_expired: bool = True):
response.raise_for_status()
logger.info("Successfully created new pointer")
except HTTPError as e:
logger.error(e.response)
logger.error(e.response.content)
if e.response.status_code == 401 and retry_on_expired:
self.headers["Authorization"] = (
f"Bearer {self.auth_service.get_active_access_token()}"
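
Logging e.response only records the Response repr (essentially the status code); e.response.content captures the error body the NRL API returned. An illustration outside the production handler (the URL is a placeholder):

# Illustration only; the production handler catches HTTPError around the NRL API call.
import requests

try:
    response = requests.get("https://example.org/nrl-endpoint")  # placeholder URL
    response.raise_for_status()
except requests.HTTPError as error:
    print(error.response)          # e.g. <Response [401]> -- status only
    print(error.response.content)  # raw body bytes, typically the API's error JSON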
3 changes: 3 additions & 0 deletions lambdas/tests/unit/conftest.py
@@ -27,6 +27,7 @@

MOCK_ZIP_OUTPUT_BUCKET_ENV_NAME = "ZIPPED_STORE_BUCKET_NAME"
MOCK_ZIP_TRACE_TABLE_ENV_NAME = "ZIPPED_STORE_DYNAMODB_NAME"
MOCK_METADATA_NRL_SQS_URL_ENV_NAME = "NRL_SQS_URL"

MOCK_LG_STAGING_STORE_BUCKET_ENV_NAME = "STAGING_STORE_BUCKET_NAME"
MOCK_LG_METADATA_SQS_QUEUE_ENV_NAME = "METADATA_SQS_QUEUE_URL"
@@ -77,6 +78,7 @@
TEST_CURRENT_GP_ODS = "Y12345"

AUTH_STATE_TABLE_NAME = "test_state_table"
NRL_SQS_URL = "https://test-queue.com"
AUTH_SESSION_TABLE_NAME = "test_session_table"
FAKE_URL = "https://fake-url.com"
OIDC_CALLBACK_URL = FAKE_URL
@@ -126,6 +128,7 @@ def set_env(monkeypatch):
monkeypatch.setenv(MOCK_LG_METADATA_SQS_QUEUE_ENV_NAME, MOCK_LG_METADATA_SQS_QUEUE)
monkeypatch.setenv(MOCK_LG_INVALID_SQS_QUEUE_ENV_NAME, MOCK_LG_INVALID_SQS_QUEUE)
monkeypatch.setenv(MOCK_AUTH_STATE_TABLE_NAME_ENV_NAME, AUTH_STATE_TABLE_NAME)
monkeypatch.setenv(MOCK_METADATA_NRL_SQS_URL_ENV_NAME, NRL_SQS_URL)
monkeypatch.setenv(MOCK_AUTH_SESSION_TABLE_NAME_ENV_NAME, AUTH_SESSION_TABLE_NAME)
monkeypatch.setenv(MOCK_OIDC_CALLBACK_URL_ENV_NAME, OIDC_CALLBACK_URL)
monkeypatch.setenv(MOCK_OIDC_CLIENT_ID_ENV_NAME, OIDC_CLIENT_ID)
21 changes: 20 additions & 1 deletion lambdas/tests/unit/helpers/data/bulk_upload/test_data.py
@@ -3,9 +3,12 @@
from enums.virus_scan_result import VirusScanResult
from freezegun import freeze_time
from models.nhs_document_reference import NHSDocumentReference
from models.nrl_sqs_message import NrlSqsMessage
from models.staging_metadata import MetadataFile, StagingMetadata
from tests.unit.conftest import MOCK_LG_BUCKET, TEST_CURRENT_GP_ODS, TEST_UUID

from lambdas.enums.nrl_sqs_upload import NrlActionTypes

sample_metadata_model = MetadataFile(
file_path="/1234567890/1of2_Lloyd_George_Record_[Joe Bloggs]_[1234567890]_[25-12-2019].pdf",
page_count="",
@@ -143,6 +146,15 @@ def build_test_sqs_message_from_nhs_number(nhs_number: str) -> dict:
return build_test_sqs_message(staging_metadata)


def build_test_nrl_sqs_fifo_message(nhs_number: str, action: str) -> NrlSqsMessage:
message_body = {
"nhs_number": nhs_number,
"action": action,
}
nrl_sqs_message = NrlSqsMessage(**message_body)
return nrl_sqs_message


@freeze_time("2024-01-01 12:00:00")
def build_test_document_reference(file_name: str, nhs_number: str = "9000000009"):
doc_ref = NHSDocumentReference(
@@ -160,8 +172,15 @@ def build_test_document_reference(file_name: str, nhs_number: str = "9000000009")
TEST_NHS_NUMBER_FOR_BULK_UPLOAD = "9000000009"
TEST_STAGING_METADATA = build_test_staging_metadata(make_valid_lg_file_names(3))
TEST_SQS_MESSAGE = build_test_sqs_message(TEST_STAGING_METADATA)
TEST_STAGING_METADATA_SINGLE_FILE = build_test_staging_metadata(
make_valid_lg_file_names(1)
)
TEST_SQS_MESSAGE_SINGLE_FILE = build_test_sqs_message(TEST_STAGING_METADATA_SINGLE_FILE)
TEST_FILE_METADATA = TEST_STAGING_METADATA.files[0]

TEST_GROUP_ID = "123"
TEST_NRL_SQS_MESSAGE = build_test_nrl_sqs_fifo_message(
TEST_NHS_NUMBER_FOR_BULK_UPLOAD, NrlActionTypes.CREATE
)
TEST_STAGING_METADATA_WITH_INVALID_FILENAME = build_test_staging_metadata(
[*make_valid_lg_file_names(2), "invalid_file_name.txt"]
)
@@ -2,9 +2,11 @@

import pytest
from repositories.bulk_upload.bulk_upload_sqs_repository import BulkUploadSqsRepository
from tests.unit.conftest import MOCK_LG_METADATA_SQS_QUEUE
from tests.unit.conftest import MOCK_LG_METADATA_SQS_QUEUE, NRL_SQS_URL
from tests.unit.helpers.data.bulk_upload.test_data import (
TEST_GROUP_ID,
TEST_NHS_NUMBER_FOR_BULK_UPLOAD,
TEST_NRL_SQS_MESSAGE,
TEST_SQS_MESSAGE,
TEST_STAGING_METADATA,
)
@@ -34,11 +36,26 @@ def test_put_staging_metadata_back_to_queue_and_increases_retries(
)


def test_put_sqs_message_back_to_queue(set_env, repo_under_test):
def test_put_sqs_message_back_to_queue(set_env, repo_under_test, mock_uuid):
repo_under_test.put_sqs_message_back_to_queue(TEST_SQS_MESSAGE)

repo_under_test.sqs_repository.send_message_with_nhs_number_attr_fifo.assert_called_with(
queue_url=MOCK_LG_METADATA_SQS_QUEUE,
message_body=TEST_SQS_MESSAGE["body"],
nhs_number=TEST_NHS_NUMBER_FOR_BULK_UPLOAD,
group_id=f"back_to_queue_bulk_upload_{mock_uuid}",
)


def test_send_message_to_nrl_sqs_fifo(set_env, repo_under_test):
repo_under_test.send_message_to_nrl_fifo(
NRL_SQS_URL,
TEST_NRL_SQS_MESSAGE,
TEST_GROUP_ID,
)
message_body = TEST_NRL_SQS_MESSAGE
repo_under_test.sqs_repository.send_message_fifo.assert_called_with(
queue_url=NRL_SQS_URL,
message_body=message_body.model_dump_json(),
group_id="123",
)
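
The reworked test_put_sqs_message_back_to_queue above now depends on a mock_uuid fixture so the group_id assertion is deterministic. The project's actual fixture is not shown in this diff; a typical pytest-mock version might look like the sketch below (an assumption, not the repository's code):

# Hedged sketch of a uuid-pinning fixture; the repository's real mock_uuid fixture may differ.
import pytest

TEST_UUID = "1234-5678-91011-1213"


@pytest.fixture
def mock_uuid(mocker):
    # Patches uuid.uuid4 globally so group ids built with f-strings are predictable.
    mocker.patch("uuid.uuid4", return_value=TEST_UUID)
    return TEST_UUID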
29 changes: 29 additions & 0 deletions lambdas/tests/unit/services/base/test_s3_service.py
@@ -345,3 +345,32 @@ def test_list_all_objects_raises_client_error_if_unexpected_response(

with pytest.raises(ClientError):
mock_service.list_all_objects(MOCK_BUCKET)


def test_file_size_return_int(mock_service, mock_client):
mock_response = {
"ResponseMetadata": {
"RequestId": "mock_req",
"HostId": "",
"HTTPStatusCode": 200,
"HTTPHeaders": {},
"RetryAttempts": 0,
},
"ContentLength": "3191",
"ETag": '"eb2996dae99afd8308e4c97bdb6a4178"',
"ContentType": "application/pdf",
"Metadata": {},
}

mock_client.head_object.return_value = mock_response

expected = 3191
actual = mock_service.get_file_size(
s3_bucket_name=MOCK_BUCKET, object_key=TEST_FILE_NAME
)
assert actual == expected

mock_client.head_object.assert_called_once_with(
Bucket=MOCK_BUCKET,
Key=TEST_FILE_NAME,
)
(Diffs for the remaining changed files were not loaded.)