Skip to content

Commit

Permalink
PRMP-1066 add RegisteredAtUploaderPractice flag to success and reject…
Browse files Browse the repository at this point in the history
…ed bulk upload reports (#458)

PRMP-1066 add RegisteredAtUploaderPractice flag to success and rejected bulk upload reports
  • Loading branch information
steph-torres-nhs authored Nov 13, 2024
1 parent 60d9fed commit fa8046a
Show file tree
Hide file tree
Showing 9 changed files with 141 additions and 43 deletions.
5 changes: 1 addition & 4 deletions lambdas/enums/metadata_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,4 @@ class MetadataReport(StrEnum):
Date = "Date"
Timestamp = "Timestamp"
ID = "ID"

@staticmethod
def list():
return [str(field) for field in MetadataReport]
RegisteredAtUploaderPractice = "RegisteredAtUploaderPractice"
8 changes: 8 additions & 0 deletions lambdas/models/bulk_upload_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from typing import Optional

from enums.metadata_report import MetadataReport
from enums.patient_ods_inactive_status import PatientOdsInactiveStatus
from pydantic import BaseModel, ConfigDict, Field
from pydantic.alias_generators import to_pascal
from utils.audit_logging_setup import LoggingService
Expand All @@ -28,6 +29,13 @@ class BulkUploadReport(BaseModel):
uploader_ods_code: str = Field(alias=MetadataReport.UploaderOdsCode)
reason: Optional[str] = Field(default="", alias=MetadataReport.Reason)

def get_registered_at_uploader_practice_status(self) -> str:
return (
self.pds_ods_code
if self.pds_ods_code in PatientOdsInactiveStatus.list()
else str(self.uploader_ods_code == self.pds_ods_code)
)


def date_string_yyyymmdd(time_now: datetime) -> str:
    """Format the given datetime as a zero-padded YYYY-MM-DD date string."""
    return f"{time_now:%Y-%m-%d}"
13 changes: 11 additions & 2 deletions lambdas/models/bulk_upload_report_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,8 +77,6 @@ def populate_report(self):
self.set_unique_failures()

def process_successful_report_item(self, item: BulkUploadReport):
self.total_successful.add((item.nhs_number, item.date))

if item.pds_ods_code == PatientOdsInactiveStatus.SUSPENDED:
self.total_suspended.add((item.nhs_number, item.date))
elif item.pds_ods_code == PatientOdsInactiveStatus.DECEASED:
Expand All @@ -91,6 +89,14 @@ def process_successful_report_item(self, item: BulkUploadReport):
):
self.total_registered_elsewhere.add((item.nhs_number, item.date))

self.total_successful.add(
(
item.nhs_number,
item.date,
item.get_registered_at_uploader_practice_status(),
)
)

def process_failed_report_item(self, item: BulkUploadReport):
is_new_failure = item.nhs_number not in self.failures_per_patient

Expand All @@ -114,6 +120,9 @@ def process_failed_report_item(self, item: BulkUploadReport):
)
}
)
self.failures_per_patient[item.nhs_number][
MetadataReport.RegisteredAtUploaderPractice.value
] = item.get_registered_at_uploader_practice_status()

def set_unique_failures(self):
patients_to_remove = {
Expand Down
21 changes: 19 additions & 2 deletions lambdas/services/bulk_upload_report_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,12 +153,18 @@ def generate_success_report(self, ods_reports: list[OdsReport]):
MetadataReport.NhsNumber,
MetadataReport.UploaderOdsCode,
MetadataReport.Date,
MetadataReport.RegisteredAtUploaderPractice,
]
data_rows = []
for report in ods_reports:
for patient in report.get_sorted(report.total_successful):
data_rows.append(
[str(patient[0]), str(report.uploader_ods_code), str(patient[1])]
[
str(patient[0]),
str(report.uploader_ods_code),
str(patient[1]),
str(patient[2]),
]
)

if data_rows:
Expand Down Expand Up @@ -252,6 +258,7 @@ def generate_rejected_report(self, ods_reports: list[OdsReport]):
MetadataReport.UploaderOdsCode,
MetadataReport.Date,
MetadataReport.Reason,
MetadataReport.RegisteredAtUploaderPractice,
]

data_rows = []
Expand All @@ -263,6 +270,7 @@ def generate_rejected_report(self, ods_reports: list[OdsReport]):
report_item[MetadataReport.UploaderOdsCode],
report_item[MetadataReport.Date],
report_item[MetadataReport.Reason],
report_item[MetadataReport.RegisteredAtUploaderPractice],
]
)

Expand All @@ -276,7 +284,16 @@ def generate_rejected_report(self, ods_reports: list[OdsReport]):
def write_items_to_csv(items: list[BulkUploadReport], csv_file_path: str):
logger.info("Writing scan results to csv file")
with open(csv_file_path, "w") as output_file:
field_names = MetadataReport.list()
field_names = [
MetadataReport.NhsNumber,
MetadataReport.UploadStatus,
MetadataReport.Reason,
MetadataReport.PdsOdsCode,
MetadataReport.UploaderOdsCode,
MetadataReport.FilePath,
MetadataReport.Date,
MetadataReport.Timestamp,
]
dict_writer_object = csv.DictWriter(output_file, fieldnames=field_names)
dict_writer_object.writeheader()
for item in items:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
NhsNumber,UploaderOdsCode,Date,Reason
9000000005,Y12345,2012-01-13,Could not find the given patient on PDS
9000000006,Y12345,2012-01-13,Could not find the given patient on PDS
9000000007,Y12345,2012-01-13,Lloyd George file already exists
9000000014,Z12345,2012-01-13,Could not find the given patient on PDS
9000000015,Z12345,2012-01-13,Could not find the given patient on PDS
9000000016,Z12345,2012-01-13,Lloyd George file already exists
NhsNumber,UploaderOdsCode,Date,Reason,RegisteredAtUploaderPractice
9000000005,Y12345,2012-01-13,Could not find the given patient on PDS,True
9000000006,Y12345,2012-01-13,Could not find the given patient on PDS,True
9000000007,Y12345,2012-01-13,Lloyd George file already exists,True
9000000014,Z12345,2012-01-13,Could not find the given patient on PDS,True
9000000015,Z12345,2012-01-13,Could not find the given patient on PDS,True
9000000016,Z12345,2012-01-13,Lloyd George file already exists,True
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
NhsNumber,UploaderOdsCode,Date
9000000000,Y12345,2012-01-13
9000000001,Y12345,2012-01-13
9000000002,Y12345,2012-01-13
9000000003,Y12345,2012-01-13
9000000004,Y12345,2012-01-13
9000000009,Z12345,2012-01-13
9000000010,Z12345,2012-01-13
9000000011,Z12345,2012-01-13
9000000012,Z12345,2012-01-13
9000000013,Z12345,2012-01-13
NhsNumber,UploaderOdsCode,Date,RegisteredAtUploaderPractice
9000000000,Y12345,2012-01-13,SUSP
9000000001,Y12345,2012-01-13,DECE
9000000002,Y12345,2012-01-13,REST
9000000003,Y12345,2012-01-13,True
9000000004,Y12345,2012-01-13,False
9000000009,Z12345,2012-01-13,SUSP
9000000010,Z12345,2012-01-13,DECE
9000000011,Z12345,2012-01-13,REST
9000000012,Z12345,2012-01-13,False
9000000013,Z12345,2012-01-13,True
28 changes: 28 additions & 0 deletions lambdas/tests/unit/models/test_bulk_upload_report.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from enums.patient_ods_inactive_status import PatientOdsInactiveStatus
from enums.upload_status import UploadStatus
from freezegun import freeze_time
from models.bulk_upload_report import BulkUploadReport
Expand Down Expand Up @@ -27,6 +28,17 @@
"UploaderOdsCode": "Y12345",
}

# Shared fixture: a completed upload where the uploader ODS code equals the
# patient's PDS ODS code, so RegisteredAtUploaderPractice resolves to "True".
# NOTE(review): this module-level instance is mutated in place by some tests
# below, so those tests are order-sensitive — confirm isolation is acceptable.
mock_report = BulkUploadReport(
    ID=TEST_UUID,
    nhs_number="9000000009",
    timestamp=1698661500,
    date="2023-10-30",
    upload_status=UploadStatus.COMPLETE,
    file_path="/9000000009/1of1_Lloyd_George_Record_[Joe Bloggs]_[9000000009]_[25-12-2019].pdf",
    pds_ods_code="Y12345",
    uploader_ods_code="Y12345",
)


def test_create_successful_upload():
expected = MOCK_DATA_COMPLETE_UPLOAD
Expand Down Expand Up @@ -92,3 +104,19 @@ def test_failed_upload_ids_and_timestamp_are_auto_populated_if_not_given(mock_uu
).model_dump(by_alias=True, exclude_none=True)

assert actual == expected


def test_get_registered_at_uploader_status_returns_true_uploader_ods_and_pds_ods_are_the_same():
    """Matching uploader and PDS ODS codes flag the patient as registered here."""
    actual = mock_report.get_registered_at_uploader_practice_status()

    assert actual == "True"


def test_get_registered_at_uploader_status_returns_false_uploader_ods_and_pds_ods_are_different():
    """Mismatched uploader and PDS ODS codes yield the string "False".

    Works on a copy of the shared fixture instead of mutating the
    module-level ``mock_report`` in place, so the changed uploader ODS code
    cannot leak into other tests (the original made tests order-dependent).
    """
    report = mock_report.model_copy(update={"uploader_ods_code": "Z12345"})

    assert report.get_registered_at_uploader_practice_status() == "False"


def test_get_registered_at_uploader_status_returns_correct_status():
    """Each inactive PDS status code is returned verbatim as the flag.

    Uses a fresh copy of the shared fixture per status instead of mutating
    the module-level ``mock_report``, so no state leaks into other tests.
    """
    for status in PatientOdsInactiveStatus.list():
        report = mock_report.model_copy(update={"pds_ods_code": status})

        assert report.get_registered_at_uploader_practice_status() == status
35 changes: 20 additions & 15 deletions lambdas/tests/unit/models/test_bulk_upload_report_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,11 +89,11 @@ def test_ods_report_populate_report_populates_successfully():
"9000000007",
},
"total_successful": {
("9000000000", "2012-01-13"),
("9000000001", "2012-01-13"),
("9000000002", "2012-01-13"),
("9000000003", "2012-01-13"),
("9000000004", "2012-01-13"),
("9000000000", "2012-01-13", "SUSP"),
("9000000001", "2012-01-13", "DECE"),
("9000000002", "2012-01-13", "REST"),
("9000000003", "2012-01-13", "True"),
("9000000004", "2012-01-13", "False"),
},
"total_registered_elsewhere": {("9000000004", "2012-01-13")},
"total_suspended": {("9000000000", "2012-01-13")},
Expand All @@ -108,18 +108,21 @@ def test_ods_report_populate_report_populates_successfully():
"Reason": "Could not find the given patient on PDS",
"Timestamp": 1688395681,
"UploaderOdsCode": "Y12345",
MetadataReport.RegisteredAtUploaderPractice.value: "True",
},
"9000000006": {
"Date": "2012-01-13",
"Reason": "Could not find the given patient on PDS",
"Timestamp": 1688395681,
"UploaderOdsCode": "Y12345",
MetadataReport.RegisteredAtUploaderPractice.value: "True",
},
"9000000007": {
"Date": "2012-01-13",
"Reason": "Lloyd George file already exists",
"Timestamp": 1688395681,
"UploaderOdsCode": "Y12345",
MetadataReport.RegisteredAtUploaderPractice.value: "True",
},
},
"unique_failures": {
Expand Down Expand Up @@ -174,6 +177,7 @@ def test_ods_report_process_failed_report_item_handles_failures():
"Reason": old_reason,
"Timestamp": old_time_stamp,
"UploaderOdsCode": TEST_UPLOADER_ODS_1,
MetadataReport.RegisteredAtUploaderPractice.value: "True",
}
}

Expand All @@ -194,6 +198,7 @@ def test_ods_report_process_failed_report_item_handles_failures():
"Reason": newest_reason,
"Timestamp": new_time_stamp,
"UploaderOdsCode": TEST_UPLOADER_ODS_1,
MetadataReport.RegisteredAtUploaderPractice.value: "True",
}
}

Expand Down Expand Up @@ -305,16 +310,16 @@ def test_summary_report_populate_report_populates_successfully():
"9000000015",
},
"total_successful": {
("9000000000", "2012-01-13"),
("9000000001", "2012-01-13"),
("9000000002", "2012-01-13"),
("9000000003", "2012-01-13"),
("9000000004", "2012-01-13"),
("9000000009", "2012-01-13"),
("9000000010", "2012-01-13"),
("9000000011", "2012-01-13"),
("9000000012", "2012-01-13"),
("9000000013", "2012-01-13"),
("9000000000", "2012-01-13", "SUSP"),
("9000000001", "2012-01-13", "DECE"),
("9000000002", "2012-01-13", "REST"),
("9000000003", "2012-01-13", "True"),
("9000000004", "2012-01-13", "False"),
("9000000009", "2012-01-13", "SUSP"),
("9000000010", "2012-01-13", "DECE"),
("9000000011", "2012-01-13", "REST"),
("9000000012", "2012-01-13", "False"),
("9000000013", "2012-01-13", "True"),
},
"total_registered_elsewhere": {
("9000000004", "2012-01-13"),
Expand Down
38 changes: 36 additions & 2 deletions lambdas/tests/unit/services/test_bulk_upload_report_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -439,13 +439,21 @@ def test_generate_summary_report_with_two_ods_reports(
def test_generate_success_report_writes_csv(
    bulk_upload_report_service, mock_get_times_for_scan, mocker
):
    """generate_success_report writes the expected per-patient success CSV.

    The additional-reports upload is mocked out so only the local CSV write
    is exercised; the file content is compared against the stored fixture.
    """
    mock_file_name = (
        f"daily_statistical_report_bulk_upload_success_{MOCK_TIMESTAMP}.csv"
    )
    bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

    test_ods_reports = bulk_upload_report_service.generate_ods_reports(
        MOCK_REPORT_ITEMS_ALL,
    )

    bulk_upload_report_service.generate_success_report(test_ods_reports)
    expected = readfile("expected_success_report.csv")
    with open(f"/tmp/{mock_file_name}") as test_file:
        actual = test_file.read()
    # Remove the temp file before asserting so a content mismatch does not
    # leak the file into /tmp for subsequent test runs.
    os.remove(f"/tmp/{mock_file_name}")
    assert expected == actual

    bulk_upload_report_service.write_and_upload_additional_reports.assert_called()

Expand Down Expand Up @@ -545,6 +553,9 @@ def test_generate_restricted_report_does_not_write_when_no_data(
def test_generate_rejected_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan, mocker
):
mock_file_name = (
f"daily_statistical_report_bulk_upload_rejected_{MOCK_TIMESTAMP}.csv"
)
bulk_upload_report_service.write_and_upload_additional_reports = mocker.MagicMock()

test_ods_reports = bulk_upload_report_service.generate_ods_reports(
Expand All @@ -553,6 +564,12 @@ def test_generate_rejected_report_writes_csv(

bulk_upload_report_service.generate_rejected_report(test_ods_reports)

expected = readfile("expected_rejected_report.csv")
with open(f"/tmp/{mock_file_name}") as test_file:
actual = test_file.read()
assert expected == actual
os.remove(f"/tmp/{mock_file_name}")

bulk_upload_report_service.write_and_upload_additional_reports.assert_called()


Expand Down Expand Up @@ -580,6 +597,7 @@ def test_write_and_upload_additional_reports_creates_csv_and_writes_to_s3(
MetadataReport.UploaderOdsCode,
MetadataReport.Date,
MetadataReport.Reason,
MetadataReport.RegisteredAtUploaderPractice,
]

mock_data_rows = [
Expand All @@ -588,27 +606,43 @@ def test_write_and_upload_additional_reports_creates_csv_and_writes_to_s3(
"Y12345",
"2012-01-13",
"Could not find the given patient on PDS",
"True",
],
[
"9000000006",
"Y12345",
"2012-01-13",
"Could not find the given patient on PDS",
"True",
],
[
"9000000007",
"Y12345",
"2012-01-13",
"Lloyd George file already exists",
"True",
],
["9000000007", "Y12345", "2012-01-13", "Lloyd George file already exists"],
[
"9000000014",
"Z12345",
"2012-01-13",
"Could not find the given patient on PDS",
"True",
],
[
"9000000015",
"Z12345",
"2012-01-13",
"Could not find the given patient on PDS",
"True",
],
[
"9000000016",
"Z12345",
"2012-01-13",
"Lloyd George file already exists",
"True",
],
["9000000016", "Z12345", "2012-01-13", "Lloyd George file already exists"],
]

bulk_upload_report_service.write_and_upload_additional_reports(
Expand Down

0 comments on commit fa8046a

Please sign in to comment.