Skip to content

Commit

Permalink
Prmdr 369 and 372 (#121)
Browse files Browse the repository at this point in the history
* prmdr-370 initial commit

* prmdr-370 added checks to validators

* prmdr-370 fix pydantic syntax

* fix unit test

* small fixes

* add unit tests

* unit tests

* [PRMDR-372] Add model for bulk upload status, add test for parsing records

* [PRMDR-372] Modify upload status model for easier interaction with dynamodb

* [PRMDR-372] rebase with prmdr-370

* [PRMDR-372] Add unit test for bulk upload reporting, remove unwanted comments

* [PRMDR-372] Fix mismatch of env var

* [PRMDR-372] fix misspelling, fix missing by_alias=True

* Add bulk upload validation for virus scanner

* [PRMDR-372] hotfix to use mock pds service in LG validation

* [PRMDR-372] replace mocked pds patient ods code

* [PRMDR-372] Fix unit test that was broken during merge

* [PRMDR-372] run formatter

* Merge branch prmdr-369-squash into prmdr-372-subtask

* [PRMDR-372] Fix import error

* [PRMDR-372] modify bulk upload lambda

* [PRMDR-372] Amend send_message_with_nhs_number_attr to have delay_second default as 0 instead of None

* Add get object tag unit test

* [PRMDR-369] Add unhappy path test for get_tag_value, minor refactoring

* [PRMDR-369] Replace literal strings with enum / constants for virus scan result. Amend logic of check_virus_result to fit ticket description

* [PRMDR-369] Add unit tests for check_virus_result, add logic to handle the case when required file not exist in S3

* [PRMDR-372] Amend mock test data of Bulk Upload to fit default mock patient of mock pds service

* [PRMDR-372] Add unit tests for bulk upload service

* run formatter

* [PRMDR-372] Remove redundant handle_invalid_message method

* run formatter

* [PRMDR-372] Amend test of bulk upload status, use freezegun for patching datetime

* [PRMDR-369] Fix incorrect comparison of Enum and str

* [PRMDR-372] Amend LG Validator to correctly report the cases for too few files and too many files

* [PRMDR-372] Amend LG Validator to give proper failure reason on PDS related errors

* [PRMDR-369] Replace base class StrEnum with (str, Enum) as GitHub Actions still uses Python 3.10

* [PRMDR-372] Undo unintended change to non related code

* [PRMDR-372] minor change on wordings

* [PRMDR-372] Fix possible bulk upload report lambda failure related to fieldnames issue

* [PRMDR-372] Undo unintended change to scss file by linter

* [PRMDR-372] patch get_user_ods_code to return mock ODS code in sandboxes

* [PRMDR-372] patch get_user_ods_code

* [PRMDR-372] Replace generic Exception catch with specific error types

* [PRMDR-372] Amend one test, update the catch error types at lambda_handler

* [PRMDR-372] Replace custom field name transform function with inflection.camelize

* [PRMDR-372] Change function name for clarity: to_capwords --> to_capitalized_camel

---------

Co-authored-by: NogaNHS <noga.sasson1@nhs.net>
Co-authored-by: Rio Knightley <rio.knightley2@nhs.net>
  • Loading branch information
3 people authored Nov 3, 2023
1 parent 987f5d2 commit f0aafa8
Show file tree
Hide file tree
Showing 24 changed files with 782 additions and 190 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ describe('DeleteAllDocumentsStage', () => {

await waitFor(async () => {
expect(
screen.getByText('Are you sure you want to permanently delete files for:')
screen.getByText('Are you sure you want to permanently delete files for:'),
).toBeInTheDocument();
});

Expand Down Expand Up @@ -97,7 +97,7 @@ describe('DeleteAllDocumentsStage', () => {

await waitFor(async () => {
expect(
screen.getByText('Are you sure you want to permanently delete files for:')
screen.getByText('Are you sure you want to permanently delete files for:'),
).toBeInTheDocument();
});

Expand Down Expand Up @@ -161,7 +161,7 @@ describe('DeleteAllDocumentsStage', () => {

await waitFor(() => {
expect(
screen.getByText('Sorry, the service is currently unavailable.')
screen.getByText('Sorry, the service is currently unavailable.'),
).toBeInTheDocument();
});
});
Expand Down Expand Up @@ -202,7 +202,7 @@ describe('DeleteAllDocumentsStage', () => {
});

const TestApp = (
props: Omit<Props, 'setStage' | 'setIsDeletingDocuments' | 'setDownloadStage'>
props: Omit<Props, 'setStage' | 'setIsDeletingDocuments' | 'setDownloadStage'>,
) => {
return (
<DeleteDocumentsStage
Expand All @@ -221,7 +221,7 @@ const renderComponent = (
history = createMemoryHistory({
initialEntries: [homeRoute],
initialIndex: 1,
})
}),
) => {
const auth: Session = {
auth: buildUserAuth(),
Expand All @@ -240,6 +240,6 @@ const renderComponent = (
<SessionProvider sessionOverride={auth}>
<TestApp {...props} />
</SessionProvider>
</ReactRouter.Router>
</ReactRouter.Router>,
);
};
12 changes: 12 additions & 0 deletions lambdas/enums/virus_scan_result.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
from enum import Enum


class VirusScanResult(str, Enum):
    """Possible outcomes of a virus scan on an uploaded file.

    Subclasses (str, Enum) rather than StrEnum so that members compare equal
    to their plain-string values while staying compatible with Python < 3.11.
    """

    CLEAN = "Clean"
    INFECTED = "Infected"
    INFECTED_ALLOWED = "InfectedAllowed"
    UNSCANNABLE = "Unscannable"
    ERROR = "Error"


# Object-tag key used when reading a file's virus scan result from its tags;
# the tag's value is expected to be one of the VirusScanResult values above.
SCAN_RESULT_TAG_KEY = "scan-result"
40 changes: 12 additions & 28 deletions lambdas/handlers/bulk_upload_handler.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
import json
import logging
import os

from botocore.exceptions import ClientError
from services.bulk_upload_service import BulkUploadService
from services.sqs_service import SQSService
from utils.exceptions import InvalidMessageException
from utils.lloyd_george_validator import LGInvalidFilesException

Expand All @@ -23,28 +21,14 @@ def lambda_handler(event, _context):
try:
logger.info(f"Processing message {index} of {len(event['Records'])}")
bulk_upload_service.handle_sqs_message(message)
except (InvalidMessageException, LGInvalidFilesException) as error:
handle_invalid_message(invalid_message=message, error=error)


def handle_invalid_message(invalid_message: dict, error=None):
# Currently we just send the invalid message to invalid queue.
# In future ticket, will change this to record errors in dynamo db
invalid_queue_url = os.environ["INVALID_SQS_QUEUE_URL"]
sqs_service = SQSService()

new_message = {"original_message": invalid_message["body"]}
if error:
new_message["error"] = str(error)

try:
nhs_number = invalid_message["messageAttributes"]["NhsNumber"]["stringValue"]
except KeyError:
nhs_number = ""

sqs_service.send_message_with_nhs_number_attr(
queue_url=invalid_queue_url,
message_body=json.dumps(new_message),
nhs_number=nhs_number,
)
logger.info(f"Sent message to invalid queue: {invalid_message}")
except (
ClientError,
InvalidMessageException,
LGInvalidFilesException,
KeyError,
TypeError,
AttributeError,
) as error:
logger.info(f"Fail to process current message due to error: {error}")
logger.info("Continue on next message")
logger.info(f"Finished processing all {len(event['Records'])} messages")
8 changes: 4 additions & 4 deletions lambdas/handlers/bulk_upload_report_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,11 @@
import datetime
import logging
import os
from typing import Optional

from boto3.dynamodb.conditions import Attr
from botocore.exceptions import ClientError
from models.bulk_upload_status import FieldNamesForBulkUploadReport
from services.dynamo_service import DynamoDBService
from services.s3_service import S3Service
from utils.decorators.ensure_env_var import ensure_environment_variables
Expand Down Expand Up @@ -51,7 +53,7 @@ def report_handler(db_service, s3_service):

def get_dynamodb_report_items(
db_service, start_timestamp: int, end_timestamp: int
) -> None or list:
) -> Optional[list]:
logger.info("Starting Scan on DynamoDB table")
bulk_upload_table_name = os.getenv("BULK_UPLOAD_DYNAMODB_NAME")
filter_time = Attr("Timestamp").gt(start_timestamp) & Attr("Timestamp").lt(
Expand All @@ -78,12 +80,11 @@ def get_dynamodb_report_items(
def write_items_to_csv(items: list, csv_file_path: str):
    """Write bulk-upload report records to a CSV file at *csv_file_path*.

    Uses the fixed FieldNamesForBulkUploadReport column list (rather than the
    keys of the first item) so every report gets a consistent header row.
    Fixes in this span: removes the dead `field_names = items[0].keys()`
    assignment that was immediately overwritten, and the redundant
    `output_file.close()` — the `with` block already closes the file.
    """
    logger.info("Writing scan results to csv file")
    with open(csv_file_path, "w") as output_file:
        dict_writer_object = csv.DictWriter(
            output_file, fieldnames=FieldNamesForBulkUploadReport
        )
        dict_writer_object.writeheader()
        for item in items:
            dict_writer_object.writerow(item)


def get_times_for_scan():
Expand All @@ -100,4 +101,3 @@ def get_times_for_scan():
def write_empty_report(file_path: str):
    """Write a placeholder report stating that no data was found.

    Fix: drop the redundant `output_file.close()` — the `with` context
    manager already closes the file when the block exits.
    """
    with open(file_path, "w") as output_file:
        output_file.write("No data was found for this timeframe")
12 changes: 1 addition & 11 deletions lambdas/handlers/search_patient_details_handler.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,18 @@
import logging
import os
from json import JSONDecodeError

from pydantic import ValidationError
from services.mock_pds_service import MockPdsApiService
from services.pds_api_service import PdsApiService
from services.ssm_service import SSMService
from utils.decorators.validate_patient_id import validate_patient_id
from utils.exceptions import (InvalidResourceIdException,
PatientNotFoundException, PdsErrorException)
from utils.lambda_response import ApiGatewayResponse
from utils.utilities import get_pds_service

logger = logging.getLogger()
logger.setLevel(logging.INFO)


def get_pds_service():
return (
PdsApiService
if (os.getenv("PDS_FHIR_IS_STUBBED") == "false")
else MockPdsApiService
)


@validate_patient_id
def lambda_handler(event, context):
logger.info("API Gateway event received - processing starts")
Expand Down
41 changes: 41 additions & 0 deletions lambdas/models/bulk_upload_status.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
from datetime import datetime
from typing import Literal

from models.config import to_capitalized_camel
from pydantic import BaseModel, ConfigDict, Field
from utils.utilities import create_reference_id


class UploadStatusBaseClass(BaseModel):
    """Base pydantic model for a bulk-upload status record.

    Field aliases are generated in CapitalizedCamel form (e.g. nhs_number ->
    NhsNumber) — presumably to match the key names stored in DynamoDB (see
    the "easier interaction with dynamodb" intent; confirm against the table
    schema). populate_by_name=True still allows construction from snake_case
    keyword arguments.
    """

    model_config = ConfigDict(
        alias_generator=to_capitalized_camel, populate_by_name=True
    )
    # Unique record id; stored under the explicit alias "ID".
    id: str = Field(alias="ID", default_factory=create_reference_id)
    nhs_number: str
    # Creation time as integer epoch seconds.
    timestamp: int = Field(default_factory=lambda: int(datetime.now().timestamp()))
    # Creation date rendered as "YYYY-MM-DD" (see date_string_yyyymmdd below).
    date: str = Field(default_factory=lambda: date_string_yyyymmdd(datetime.now()))
    file_path: str


class SuccessfulUpload(UploadStatusBaseClass):
    """Status record for a file that completed bulk upload successfully."""

    upload_status: Literal["complete"] = "complete"


class FailedUpload(UploadStatusBaseClass):
    """Status record for a file whose bulk upload failed."""

    upload_status: Literal["failed"] = "failed"
    # Explanation of why the upload failed, recorded for the report.
    failure_reason: str


# Column headers for the bulk upload report CSV, in output order.
# These are the aliased (CapitalizedCamel / "ID") names of the fields
# declared on the upload-status models above.
FieldNamesForBulkUploadReport = [
    "NhsNumber",
    "UploadStatus",
    "FailureReason",
    "FilePath",
    "Date",
    "Timestamp",
    "ID",
]


def date_string_yyyymmdd(time_now: datetime) -> str:
    """Return the date portion of *time_now* as a "YYYY-MM-DD" string."""
    return time_now.date().isoformat()
4 changes: 4 additions & 0 deletions lambdas/models/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,7 @@ def to_camel(string: str) -> str:


conf = ConfigDict(alias_generator=to_camel)


def to_capitalized_camel(snake_case_string: str) -> str:
    """Convert a snake_case name to capitalized camel case.

    Example: "nhs_number" -> "NhsNumber".
    """
    # uppercase_first_letter=True yields CapWords ("NhsNumber") rather than
    # lower camel case ("nhsNumber").
    camelized = inflection.camelize(snake_case_string, uppercase_first_letter=True)
    return camelized
Loading

0 comments on commit f0aafa8

Please sign in to comment.