diff --git a/Makefile b/Makefile
index d3543018..1e618188 100644
--- a/Makefile
+++ b/Makefile
@@ -354,6 +354,7 @@ deploy-api:
 	aws lambda update-function-code --function-name ien-$(ENV_NAME)-syncdata --s3-bucket $(API_SRC_BUCKET) --s3-key "api-lambda-s3" --region $(AWS_REGION) > /dev/null
 	aws lambda update-function-code --function-name ien-$(ENV_NAME)-notifylambda --s3-bucket $(API_SRC_BUCKET) --s3-key "api-lambda-s3" --region $(AWS_REGION) > /dev/null
 	aws lambda update-function-code --function-name ien-$(ENV_NAME)-cache-reports --s3-bucket $(API_SRC_BUCKET) --s3-key "api-lambda-s3" --region $(AWS_REGION) > /dev/null
+	aws lambda update-function-code --function-name ien-$(ENV_NAME)-s3-upload-reports --s3-bucket $(API_SRC_BUCKET) --s3-key "api-lambda-s3" --region $(AWS_REGION) > /dev/null
 
 deploy-all: sync-app upload-api-zip deploy-api
 	@echo "Deploying Webapp and API"
diff --git a/apps/api/src/report/report.controller.ts b/apps/api/src/report/report.controller.ts
index fef9ef84..0476b4be 100644
--- a/apps/api/src/report/report.controller.ts
+++ b/apps/api/src/report/report.controller.ts
@@ -1,5 +1,7 @@
+/* eslint-disable no-console */
 import { Controller, Get, Inject, Logger, Query, UseGuards } from '@nestjs/common';
 import { ApiOperation, ApiTags } from '@nestjs/swagger';
+import AWS from 'aws-sdk';
 import { Access, ReportPeriodDTO, EmployeeRO } from '@ien/common';
 import { AuthGuard } from 'src/auth/auth.guard';
 import { AppLogger } from 'src/common/logger.service';
@@ -11,6 +13,8 @@ import { ReportS3Service } from './report.s3.service';
 @ApiTags('IEN Reports')
 @UseGuards(AuthGuard)
 export class ReportController {
+  private uploadLambda = new AWS.Lambda();
+
   constructor(
     @Inject(Logger) private readonly logger: AppLogger,
     @Inject(ReportService) private readonly reportService: ReportService,
@@ -152,17 +156,13 @@ export class ReportController {
     @Query() { from, to }: ReportPeriodDTO,
     @User() user: EmployeeRO,
   ): Promise {
-    const data = await this.reportService.extractApplicantsData({ from, to }, user?.ha_pcn_id);
-    if (
-      data?.length > 10 &&
-      process.env.NODE_ENV !== 'test' &&
-      process.env.RUNTIME_ENV !== 'local'
-    ) {
-      const key = `ien-applicant-data-extract_${from}-${to}_${user?.user_id}_${Date.now()}`;
-      await this.reportS3Service.uploadFile(key, data);
-      return { url: await this.reportS3Service.generatePresignedUrl(key) };
-    }
-    return data;
+    return this.extractData(
+      { from, to },
+      user,
+      'extract-data',
+      'applicant',
+      this.reportService.extractApplicantsData.bind(this.reportService),
+    );
   }
   @ApiOperation({ summary: 'Extract milestones' })
   @Get('/applicant/extract-milestones')
@@ -171,16 +171,65 @@
     @Query() { from, to }: ReportPeriodDTO,
     @User() user: EmployeeRO,
   ): Promise {
-    const data = await this.reportService.extractMilestoneData({ to, from }, user?.ha_pcn_id);
-    if (
-      data?.length > 10 &&
-      process.env.NODE_ENV !== 'test' &&
-      process.env.RUNTIME_ENV !== 'local'
-    ) {
-      const key = `ien-milestone-data-extract_${from}-${to}_${user?.user_id}_${Date.now()}`;
-      await this.reportS3Service.uploadFile(key, data);
-      return { url: await this.reportS3Service.generatePresignedUrl(key) };
+    return this.extractData(
+      { from, to },
+      user,
+      'extract-milestone',
+      'milestone',
+      this.reportService.extractMilestoneData.bind(this.reportService),
+    );
+  }
+
+  private shouldUseS3(): boolean {
+    return process.env.NODE_ENV !== 'test' && process.env.RUNTIME_ENV !== 'local';
+  }
+
+  private generateS3Key(
+    from: string,
+    to: string,
+    ha_pcn_id: string | undefined | null,
+    type: 'milestone' | 'applicant',
+  ): string {
+    return `ien-${type}-data-extract_${from}-${to}_${ha_pcn_id}_${Date.now()}`;
+  }
+
+  private async invokeUploadLambda(s3Key: string, param: object, path: string): Promise {
+    await this.uploadLambda
+      .invoke({
+        FunctionName: `${process.env.NAMESPACE}-s3-upload-reports`, // Name of the second Lambda
+        InvocationType: 'Event', // Asynchronous invocation
+        Payload: JSON.stringify({
+          s3Key,
+          param,
+          path,
+        }),
+      })
+      .promise();
+  }
+
+  private async extractData(
+    period: ReportPeriodDTO,
+    user: EmployeeRO,
+    apiPath: 'extract-data' | 'extract-milestone',
+    type: 'milestone' | 'applicant',
+    extractFunction: (
+      period: ReportPeriodDTO,
+      ha_pcn_id: string | undefined | null,
+    ) => Promise,
+  ): Promise {
+    if (this.shouldUseS3()) {
+      const s3Key = this.generateS3Key(period.from, period.to, user?.user_id, type);
+      const url = await this.reportS3Service.generatePresignedUrl(s3Key);
+
+      await this.invokeUploadLambda(
+        s3Key,
+        { from: period.from, to: period.to, ha_pcn_id: user?.ha_pcn_id },
+        apiPath,
+      );
+      return { url };
     }
+
+    const data = await extractFunction(period, user?.ha_pcn_id);
     return data;
   }
 }
diff --git a/apps/api/src/report/report.s3.service.ts b/apps/api/src/report/report.s3.service.ts
index eb5a243d..3dae2af6 100644
--- a/apps/api/src/report/report.s3.service.ts
+++ b/apps/api/src/report/report.s3.service.ts
@@ -16,7 +16,7 @@ export class ReportS3Service {
     });
   }
 
-  async uploadFile(key: string, data: Record): Promise {
+  async uploadFile(key: string, data: Record): Promise {
     if (!this.s3) {
       throw new InternalServerErrorException('the feature is disabled');
     }
@@ -26,7 +26,7 @@
       Key: key,
       Body: JSON.stringify(data),
     };
-      await this.s3.upload(params).promise();
+      return this.s3.upload(params).promise();
     } catch (e) {
       throw new InternalServerErrorException('failed to upload a report data');
     }
diff --git a/apps/api/src/report/report.service.ts b/apps/api/src/report/report.service.ts
index 6926a26a..1ce5ebbc 100644
--- a/apps/api/src/report/report.service.ts
+++ b/apps/api/src/report/report.service.ts
@@ -1,6 +1,6 @@
-import { Inject, Logger } from '@nestjs/common';
+import { Inject, Logger, OnModuleDestroy } from '@nestjs/common';
 import { mean, median, min, mode, round } from 'mathjs';
-import { getManager, Repository, In, getRepository, Not, ILike } from 'typeorm';
+import { getManager, Repository, In, getRepository, Not, ILike, Connection } from 'typeorm';
 import { InjectRepository } from '@nestjs/typeorm';
 import dayjs from 'dayjs';
 import _ from 'lodash';
@@ -24,7 +24,7 @@ import { IENHaPcn } from 'src/applicant/entity/ienhapcn.entity';
 
 export const PERIOD_START_DATE = '2022-05-02';
 
-export class ReportService {
+export class ReportService implements OnModuleDestroy {
   constructor(
     @Inject(Logger) private readonly logger: AppLogger,
     @Inject(ReportUtilService)
@@ -33,8 +33,13 @@ export class ReportService {
     private readonly ienapplicantStatusRepository: Repository,
     @InjectRepository(IENHaPcn)
     private readonly ienHaPcnRepository: Repository,
+    private readonly connection: Connection,
   ) {}
 
+  async onModuleDestroy() {
+    await this.connection.close();
+  }
+
   captureFromTo(from: string, to: string) {
     this.reportUtilService._isValidDateValue(from);
     this.reportUtilService._isValidDateValue(to);
diff --git a/apps/api/src/uploadreports.ts b/apps/api/src/uploadreports.ts
new file mode 100644
index 00000000..9e70dfcf
--- /dev/null
+++ b/apps/api/src/uploadreports.ts
@@ -0,0 +1,59 @@
+import { NestFactory } from '@nestjs/core';
+import { Context, Handler } from 'aws-lambda';
+import { AppModule } from './app.module';
+import { AppLogger } from './common/logger.service';
+import { ReportService } from './report/report.service';
+import { ReportS3Service } from './report/report.s3.service';
+
+let app: any = null;
+
+/**
+ * This function triggers existing NestJS application services without API Gateway.
+ * All scheduled and background job triggers will be added here.
+ * This handler extracts report data for the requested period and uploads it to S3.
+ */
+export const handler: Handler = async (event, context: Context) => {
+  if (!app) {
+    app = await NestFactory.create(AppModule);
+    await app.init();
+  }
+  const reportS3Service = app.get(ReportS3Service);
+  const reportService = app.get(ReportService);
+
+  const appLogger = app.get(AppLogger);
+  appLogger.log({ event });
+  appLogger.log({ context });
+  try {
+    if (
+      event.hasOwnProperty('s3Key') &&
+      event.hasOwnProperty('param') &&
+      event.hasOwnProperty('path')
+    ) {
+      const { s3Key, path, param } = event;
+      appLogger.log('Start uploading reports...');
+      let data = [];
+      if (path === 'extract-data') {
+        data = await reportService.extractApplicantsData(
+          { from: param.from, to: param.to },
+          param.ha_pcn_id,
+        );
+      } else if (path === 'extract-milestone') {
+        data = await reportService.extractMilestoneData(
+          { from: param.from, to: param.to },
+          param.ha_pcn_id,
+        );
+      }
+      await reportS3Service
+        .uploadFile(s3Key, data)
+        .then(() => {
+          appLogger.log('File uploaded successfully.');
+        })
+        .catch((err: any) => {
+          appLogger.error('File upload failed: ', err);
+        });
+    }
+  } catch (e) {
+    appLogger.error(e);
+  }
+  appLogger.log('...end uploading reports');
+};
diff --git a/apps/web/src/services/report.ts b/apps/web/src/services/report.ts
index e07ffdfc..5059ab28 100644
--- a/apps/web/src/services/report.ts
+++ b/apps/web/src/services/report.ts
@@ -353,6 +353,37 @@ const createDataExtractWorkBook = (data: object[], sheetName: string): WorkBook
   return workbook;
 };
 
+async function checkFileExists(url: string): Promise {
+  try {
+    const response = await fetch(url, { method: 'GET', headers: { Range: 'bytes=0-1' } });
+    return response.ok; // true if the file exists
+  } catch (error) {
+    // eslint-disable-next-line no-console
+    console.error('Error checking file existence:', error);
+    return false;
+  }
+}
+
+async function pollForFile(url: string, interval: number, timeout: number): Promise {
+  const startTime = Date.now();
+
+  return new Promise((resolve, reject) => {
+    const intervalId = setInterval(async () => {
+      const fileExists = await checkFileExists(url);
+
+      if (fileExists) {
+        clearInterval(intervalId);
+        resolve();
+      }
+
+      if (Date.now() - startTime > timeout) {
+        clearInterval(intervalId);
+        reject(new Error('Polling timeout: File not available within the expected time.'));
+      }
+    }, interval);
+  });
+}
+
 export const createApplicantDataExtractWorkbook = async (
   filter: PeriodFilter,
 ): Promise => {
@@ -360,7 +391,14 @@ export const createApplicantDataExtractWorkbook = async (
   const result = await getApplicantDataExtract(filter);
   let applicants = [];
   if ('url' in result) {
-    applicants = await fetchJsonDataFromS3Url(result.url);
+    const { url } = result;
+    const pollInterval = 5000; // Check every 5 seconds
+    const pollTimeout = 180000; // Stop after 3 minutes
+    await pollForFile(url, pollInterval, pollTimeout).catch(error => {
+      // eslint-disable-next-line no-console
+      console.error(error.message);
+    });
+    applicants = await fetchJsonDataFromS3Url(url);
   } else {
     applicants = result;
   }
@@ -400,7 +438,14 @@ export const createMilestoneDataExtractWorkbook = async (
   const result = await getMilestoneDataExtract(filter);
   let milestones = [];
   if ('url' in result) {
-    milestones = await fetchJsonDataFromS3Url(result.url);
+    const { url } = result;
+    const pollInterval = 5000; // Check every 5 seconds
+    const pollTimeout = 180000; // Stop after 3 minutes
+    await pollForFile(url, pollInterval, pollTimeout).catch(error => {
+      // eslint-disable-next-line no-console
+      console.error(error.message);
+    });
+    milestones = await fetchJsonDataFromS3Url(url);
   } else {
     milestones = result;
   }
diff --git a/terraform/api.tf b/terraform/api.tf
index d730701f..f9aac671 100644
--- a/terraform/api.tf
+++ b/terraform/api.tf
@@ -62,6 +62,7 @@ resource "aws_lambda_function" "api" {
       DOCS_BUCKET       = var.docs_bucket
       REPORTS_BUCKET    = var.reports_bucket
       NO_COLOR          = "true"
+      NAMESPACE         = local.namespace
     }
   }
 }
diff --git a/terraform/main.tf b/terraform/main.tf
index 50086cc4..f9472c0d 100644
--- a/terraform/main.tf
+++ b/terraform/main.tf
@@ -29,6 +29,7 @@ locals {
   sync_applicant_data_scheduler = "${local.namespace}-hmbc-to-ien-applicants"
   notify_lambda_name            = "${local.namespace}-notifylambda"
   cache_reports_lambda_name     = "${local.namespace}-cache-reports"
+  report_s3_upload_lambda_name  = "${local.namespace}-s3-upload-reports"
 
   db_name    = "${local.namespace}-db"
   has_domain = var.domain != ""
diff --git a/terraform/report_s3_upload_lambda_function.tf b/terraform/report_s3_upload_lambda_function.tf
new file mode 100644
index 00000000..525499d9
--- /dev/null
+++ b/terraform/report_s3_upload_lambda_function.tf
@@ -0,0 +1,55 @@
+resource "aws_lambda_function" "UploadReports" {
+  description      = "Trigger uploading of Reports"
+  function_name    = local.report_s3_upload_lambda_name
+  role             = aws_iam_role.lambda.arn
+  runtime          = "nodejs18.x"
+  filename         = "./build/empty_lambda.zip"
+  source_code_hash = filebase64sha256("./build/empty_lambda.zip")
+  handler          = "api/uploadreports.handler"
+  memory_size      = var.function_memory_mb
+  timeout          = 300
+
+  vpc_config {
+    security_group_ids = [data.aws_security_group.app.id]
+    subnet_ids         = data.aws_subnet_ids.app.ids
+  }
+
+  lifecycle {
+    ignore_changes = [
+      # Ignore changes to the deployment package attributes; the function code
+      # is deployed outside of Terraform (see the deploy-api target in the Makefile).
+      filename,
+      source_code_hash,
+      source_code_size,
+      last_modified,
+    ]
+  }
+
+  environment {
+    variables = {
+      NODE_ENV          = "production"
+      RUNTIME_ENV       = "hosted"
+      AUTH_URL          = data.aws_ssm_parameter.keycloak_url.value
+      AUTH_REALM        = data.aws_ssm_parameter.keycloak_realm.value
+      TARGET_ENV        = var.target_env
+      AWS_S3_REGION     = var.region
+      BUILD_ID          = var.build_id
+      BUILD_INFO        = var.build_info
+      POSTGRES_USERNAME = var.db_username
+      POSTGRES_PASSWORD = data.aws_ssm_parameter.postgres_password.value
+      POSTGRES_HOST     = aws_rds_cluster.pgsql.endpoint
+      POSTGRES_DATABASE = aws_rds_cluster.pgsql.database_name
+      REPORTS_BUCKET    = var.reports_bucket
+      NO_COLOR          = "true"
+    }
+  }
+}
+
+resource "aws_lambda_permission" "allow_invoke_by_lambda" {
+  statement_id  = "AllowInvokeByAnotherLambda" # Unique ID for the permission
+  action        = "lambda:InvokeFunction"
+  function_name = aws_lambda_function.UploadReports.function_name
+  principal     = "lambda.amazonaws.com" # This specifies that the invoking service is Lambda
+  # ARN of the Lambda that will invoke this function
+  source_arn = aws_lambda_function.api.arn # Adjust this with the correct resource
+}