Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[PRMP-1063] blank bulk upload reports don't create a file #455

Merged
merged 5 commits into from
Oct 30, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
105 changes: 60 additions & 45 deletions lambdas/services/bulk_upload_report_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,16 +158,19 @@ def generate_success_report(self, ods_reports: list[OdsReport]):
[str(patient[0]), str(report.uploader_ods_code), str(patient[1])]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)
if data_rows:
self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily success report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
logger.info("Uploading daily success report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
else:
logger.info("No data to report for daily success report file")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is it possible to take this out to a new function?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It feels like scope creep to me as the only functionality that has changed properly is line 173, though I agree that the code would be cleaner this way. @abbas-khan10 what do you reckon?

Copy link
Contributor

@abbas-khan10 abbas-khan10 Oct 29, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I have no preference, I had considered this when first implementing as there is repetition. What were you thinking @NogaNHS? Something like:

def write_and_upload_additional_reports(file_name, headers, data_rows) -> bool:
    if data_rows:
            self.write_additional_report_items_to_csv(
                file_name=file_name, headers=headers, rows_to_write=data_rows
            )

            logger.info("Uploading daily success report file to S3")
            self.s3_service.upload_file(
                s3_bucket_name=self.reports_bucket,
                file_key=f"{self.s3_key_prefix}/{file_name}",
                file_name=f"/tmp/{file_name}",
            )
            return True
    return False

Copy link
Contributor

@abbas-khan10 abbas-khan10 Oct 29, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The bool would serve as a way of customising the log message for each report type as we want to be specific with which report wasn't created (open to discussion obviously) e.g. for the code above:

report_uploaded = write_and_upload_additional_reports(file_name, headers, data_rows)

if not report_uploaded:
    logger.info("No data to report for daily success report file")

Copy link
Contributor Author

@AndyFlintNHS AndyFlintNHS Oct 29, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As opposed to doing a true/false return, I was going to suggest something more like this, so that it only calls 'write and upload' if it actually needs to:

        if data_rows:
            logger.info("Uploading daily success report file to S3")
            self.write_and_upload_additional_reports(file_name, headers, data_rows)
        else:
            logger.info("No data to report for daily success report file")

    def write_and_upload_additional_reports(self, file_name, headers, data_rows):
        self.write_additional_report_items_to_csv(
            file_name=file_name, headers=headers, rows_to_write=data_rows
        )

        self.s3_service.upload_file(
            s3_bucket_name=self.reports_bucket,
            file_key=f"{self.s3_key_prefix}/{file_name}",
            file_name=f"/tmp/{file_name}",
        )

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Logic has been broken out into a new function


def generate_suspended_report(self, ods_reports: list[OdsReport]):
file_name = (
Expand All @@ -186,16 +189,19 @@ def generate_suspended_report(self, ods_reports: list[OdsReport]):
[str(patient[0]), str(report.uploader_ods_code), str(patient[1])]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)
if data_rows:
self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily suspended report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
logger.info("Uploading daily suspended report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
else:
logger.info("No data to report for daily suspended report file")

def generate_deceased_report(self, ods_reports: list[OdsReport]):
file_name = (
Expand All @@ -220,16 +226,19 @@ def generate_deceased_report(self, ods_reports: list[OdsReport]):
]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)
if data_rows:
self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily deceased report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
logger.info("Uploading daily deceased report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
else:
logger.info("No data to report for daily deceased report file")

def generate_restricted_report(self, ods_reports: list[OdsReport]):
file_name = (
Expand All @@ -248,16 +257,19 @@ def generate_restricted_report(self, ods_reports: list[OdsReport]):
[str(patient[0]), str(report.uploader_ods_code), str(patient[1])]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)
if data_rows:
self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily restricted report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
logger.info("Uploading daily restricted report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
else:
logger.info("No data to report for daily deceased report file")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

daily restricted*

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Corrected


def generate_rejected_report(self, ods_reports: list[OdsReport]):
file_name = (
Expand All @@ -283,16 +295,19 @@ def generate_rejected_report(self, ods_reports: list[OdsReport]):
]
)

self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)
if data_rows:
self.write_additional_report_items_to_csv(
file_name=file_name, headers=headers, rows_to_write=data_rows
)

logger.info("Uploading daily rejected report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
logger.info("Uploading daily rejected report file to S3")
self.s3_service.upload_file(
s3_bucket_name=self.reports_bucket,
file_key=f"{self.s3_key_prefix}/{file_name}",
file_name=f"/tmp/{file_name}",
)
else:
logger.info("No data to report for daily rejected report file")

@staticmethod
def write_items_to_csv(items: list[BulkUploadReport], csv_file_path: str):
Expand Down
90 changes: 90 additions & 0 deletions lambdas/tests/unit/services/test_bulk_upload_report_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -459,6 +459,24 @@ def test_generate_success_report_writes_csv(
)


def test_generate_success_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan
):
    """With no ODS data, the success report must create no CSV and upload nothing."""
    # Name of the file that must NOT appear in /tmp after the call.
    unexpected_file = (
        f"daily_statistical_report_bulk_upload_success_{MOCK_TIMESTAMP}.csv"
    )

    empty_ods_reports = bulk_upload_report_service.generate_ods_reports([])
    bulk_upload_report_service.generate_success_report(empty_ods_reports)

    # Opening the report path should fail because the file was never written.
    with pytest.raises(FileNotFoundError):
        open(f"/tmp/{unexpected_file}")

    bulk_upload_report_service.s3_service.upload_file.assert_not_called()


def test_generate_suspended_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan
):
Expand All @@ -485,6 +503,24 @@ def test_generate_suspended_report_writes_csv(
)


def test_generate_suspended_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan
):
    """With no ODS data, the suspended report must create no CSV and upload nothing."""
    # Name of the file that must NOT appear in /tmp after the call.
    unexpected_file = (
        f"daily_statistical_report_bulk_upload_suspended_{MOCK_TIMESTAMP}.csv"
    )

    empty_ods_reports = bulk_upload_report_service.generate_ods_reports([])
    bulk_upload_report_service.generate_suspended_report(empty_ods_reports)

    # Opening the report path should fail because the file was never written.
    with pytest.raises(FileNotFoundError):
        open(f"/tmp/{unexpected_file}")

    bulk_upload_report_service.s3_service.upload_file.assert_not_called()


def test_generate_deceased_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan
):
Expand All @@ -511,6 +547,24 @@ def test_generate_deceased_report_writes_csv(
)


def test_generate_deceased_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan
):
    """With no ODS data, the deceased report must create no CSV and upload nothing."""
    # Name of the file that must NOT appear in /tmp after the call.
    unexpected_file = (
        f"daily_statistical_report_bulk_upload_deceased_{MOCK_TIMESTAMP}.csv"
    )

    empty_ods_reports = bulk_upload_report_service.generate_ods_reports([])
    bulk_upload_report_service.generate_deceased_report(empty_ods_reports)

    # Opening the report path should fail because the file was never written.
    with pytest.raises(FileNotFoundError):
        open(f"/tmp/{unexpected_file}")

    bulk_upload_report_service.s3_service.upload_file.assert_not_called()


def test_generate_restricted_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan
):
Expand All @@ -537,6 +591,24 @@ def test_generate_restricted_report_writes_csv(
)


def test_generate_restricted_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan
):
    """With no ODS data, the restricted report must create no CSV and upload nothing."""
    # Name of the file that must NOT appear in /tmp after the call.
    unexpected_file = (
        f"daily_statistical_report_bulk_upload_restricted_{MOCK_TIMESTAMP}.csv"
    )

    empty_ods_reports = bulk_upload_report_service.generate_ods_reports([])
    bulk_upload_report_service.generate_restricted_report(empty_ods_reports)

    # Opening the report path should fail because the file was never written.
    with pytest.raises(FileNotFoundError):
        open(f"/tmp/{unexpected_file}")

    bulk_upload_report_service.s3_service.upload_file.assert_not_called()


def test_generate_rejected_report_writes_csv(
bulk_upload_report_service, mock_get_times_for_scan
):
Expand All @@ -561,3 +633,21 @@ def test_generate_rejected_report_writes_csv(
file_key=f"bulk-upload-reports/2012-01-13/{mock_file_name}",
file_name=f"/tmp/{mock_file_name}",
)


def test_generate_rejected_report_does_not_write_when_no_data(
    bulk_upload_report_service, mock_get_times_for_scan
):
    """With no ODS data, the rejected report must create no CSV and upload nothing."""
    # Name of the file that must NOT appear in /tmp after the call.
    unexpected_file = (
        f"daily_statistical_report_bulk_upload_rejected_{MOCK_TIMESTAMP}.csv"
    )

    empty_ods_reports = bulk_upload_report_service.generate_ods_reports([])
    bulk_upload_report_service.generate_rejected_report(empty_ods_reports)

    # Opening the report path should fail because the file was never written.
    with pytest.raises(FileNotFoundError):
        open(f"/tmp/{unexpected_file}")

    bulk_upload_report_service.s3_service.upload_file.assert_not_called()
Loading