diff --git a/backend/prisma/schema.prisma b/backend/prisma/schema.prisma
index 15dd4ee4..33590d3f 100644
--- a/backend/prisma/schema.prisma
+++ b/backend/prisma/schema.prisma
@@ -38,6 +38,7 @@ model file_submission {
   create_utc_timestamp DateTime @db.Timestamp(6)
   update_user_id String @db.VarChar(200)
   update_utc_timestamp DateTime @db.Timestamp(6)
+  file_error_logs file_error_logs[]
   file_operation_codes file_operation_codes @relation(fields: [file_operation_code], references: [file_operation_code], onDelete: NoAction, onUpdate: NoAction, map: "file_operation_code_fk")
   submission_status submission_status_code @relation(fields: [submission_status_code], references: [submission_status_code], onDelete: NoAction, onUpdate: NoAction, map: "submission_status_code_fk")
 }
@@ -228,16 +229,6 @@ model aqi_sample_fractions {
   update_utc_timestamp DateTime @db.Timestamp(6)
 }
 
-model aqi_data_classifications {
-  aqi_data_classifications_id String @id @default(dbgenerated("gen_random_uuid()")) @db.Uuid
-  custom_id String @db.VarChar(200)
-  description String? @db.VarChar(2000)
-  create_user_id String @db.VarChar(200)
-  create_utc_timestamp DateTime @db.Timestamp(6)
-  update_user_id String @db.VarChar(200)
-  update_utc_timestamp DateTime @db.Timestamp(6)
-}
-
 model file_operation_codes {
   file_operation_code String @id(map: "file_operation_code_pk") @db.VarChar(20)
   description String @db.VarChar(250)
@@ -249,3 +240,21 @@ model file_operation_codes {
   update_utc_timestamp DateTime @db.Timestamp(6)
   file_submission file_submission[]
 }
+
+model file_error_logs {
+  file_error_log_id String @id @default(dbgenerated("gen_random_uuid()")) @db.Uuid
+  file_submission_id String? @db.Uuid
+  file_name String? @db.VarChar(200)
+  error_log Json?
+  file_submission file_submission? @relation(fields: [file_submission_id], references: [submission_id], onDelete: NoAction, onUpdate: NoAction, map: "file_submission_id_fk")
+}
+
+model aqi_data_classifications {
+  aqi_data_classifications_id String @id @default(dbgenerated("gen_random_uuid()")) @db.Uuid
+  custom_id String @db.VarChar(200)
+  description String? @db.VarChar(2000)
+  create_user_id String @db.VarChar(200)
+  create_utc_timestamp DateTime @db.Timestamp(6)
+  update_user_id String @db.VarChar(200)
+  update_utc_timestamp DateTime @db.Timestamp(6)
+}
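
Note on the new error_log column: each array entry the backend stores there is one validation finding. A minimal sketch of the shape the rest of this diff assumes (the type name ErrorLogEntry is illustrative, not something defined in this PR):

    // One validation finding, as produced by localValidation() and
    // parseObsResultResponse() below and persisted into file_error_logs.error_log.
    type ErrorLogEntry = {
      rowNum: number;                  // spreadsheet row the finding refers to
      type: "ERROR" | "WARNING";       // only ERROR entries lead to a REJECTED status
      message: Record<string, string>; // field name -> human-readable message
    };

    // Example entry:
    // { rowNum: 12, type: "ERROR", message: { Medium: "water not found in AQI Mediums" } }
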
diff --git a/backend/src/aqi_api/aqi_api.service.ts b/backend/src/aqi_api/aqi_api.service.ts
index aa01e964..21de3b0a 100644
--- a/backend/src/aqi_api/aqi_api.service.ts
+++ b/backend/src/aqi_api/aqi_api.service.ts
@@ -9,8 +9,8 @@ export class AqiApiService {
   private readonly logger = new Logger(AqiApiService.name);
   private axiosInstance: AxiosInstance;
 
-  private wait = (ms: number) =>
-    new Promise((resolve) => setTimeout(resolve, ms));
+  private wait = (seconds: number) =>
+    new Promise((resolve) => setTimeout(resolve, seconds * 1000));
 
   constructor(private prisma: PrismaService) {
     this.axiosInstance = axios.create({
@@ -105,9 +105,9 @@ export class AqiApiService {
         `API call to Observation Import succeeded: ${response.status}`,
       );
       const statusURL = response.headers.location;
-      const obsResults = await this.getObservationsStatusResult(statusURL);
+      const obsStatus = await this.getObservationsStatusResult(statusURL);
 
-      const errorMessages = this.parseObsResultResponse(obsResults);
+      const errorMessages = this.parseObsResultResponse(obsStatus);
       return errorMessages;
     }
   } catch (err) {
@@ -115,16 +115,17 @@ export class AqiApiService {
     }
   }
 
-  async getObservationsStatusResult(location: string) {
+  async getObservationsStatusResult(statusURL: string) {
     try {
-      const response = await axios.get(location, {
+      const response = await axios.get(statusURL, {
         headers: {
           Authorization: `token ${process.env.AQI_ACCESS_TOKEN}`,
           "x-api-key": process.env.AQI_ACCESS_TOKEN,
         },
       });
 
-      await this.wait(15000);
+      await this.wait(9);
+      console.log(response.data.id);
 
       const obsResultResponse = await axios.get(
         `${process.env.AQI_BASE_URL}/v2/observationimports/${response.data.id}/result`,
@@ -157,16 +158,16 @@ export class AqiApiService {
   }
 
   parseObsResultResponse(obsResults: any) {
-    let errorMessages = "";
+    let errorMessages = [];
     if (obsResults.errorCount > 0) {
       obsResults.importItems.forEach((item) => {
         const rowId = item.rowId;
         const errors = item.errors;
 
         Object.entries(errors).forEach((error) => {
-          errorMessages += `ERROR: Row ${rowId} Observation file: ${error[1][0].errorMessage}\n`;
+          let errorLog = `{"rowNum": ${rowId}, "type": "ERROR", "message": {"Observation File": "${error[1][0].errorMessage}"}}`;
+          errorMessages.push(JSON.parse(errorLog));
         });
-        errorMessages += "\n";
       });
     }
     return errorMessages;
@@ -236,37 +237,21 @@ export class AqiApiService {
     }
   }
 
-  mergeErrorMessages(localErrors: string, remoteErrors: string) {
-    const localErrorLines = localErrors.split("\n");
-    const remoteErrorLines = remoteErrors.split("\n");
-
-    const errorMap: any = {};
-
-    const extractRowNumber = (line: string): number | null => {
-      const match = line.match(/Row (\d+)/);
-      return match ? parseInt(match[1]) : null;
-    };
+  mergeErrorMessages(localErrors: any[], remoteErrors: any[]) {
+    const map = new Map();
 
-    const addErrorsToMap = (lines: string[]) => {
-      lines.forEach((line) => {
-        const rowNumber = extractRowNumber(line);
-        if (rowNumber !== null) {
-          if (!errorMap[rowNumber]) {
-            errorMap[rowNumber] = [];
-          }
-          errorMap[rowNumber].push(line);
-        }
-      });
+    const mergeItem = (item: any) => {
+      const exists = map.get(item.rowNum);
+      map.set(
+        item.rowNum,
+        exists
+          ? { ...exists, message: { ...exists.message, ...item.message } }
+          : item,
+      );
     };
 
-    addErrorsToMap(localErrorLines);
-    addErrorsToMap(remoteErrorLines);
-
-    const mergedErrors = Object.keys(errorMap)
-      .sort((a, b) => parseInt(a) - parseInt(b))
-      .flatMap((row) => errorMap[parseInt(row)])
-      .join("\n");
+    [...localErrors, ...remoteErrors].forEach(mergeItem);
 
-    return mergedErrors;
+    return Array.from(map.values());
   }
 }
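
For clarity on the rewritten mergeErrorMessages: it keeps one entry per rowNum and unions the message objects, so a row flagged both locally and by the AQI dry run surfaces as a single finding. Note that, unlike the string version it replaces, the result comes back in first-seen order rather than sorted by row number. A worked example with invented values:

    // Hypothetical inputs to mergeErrorMessages:
    const localErrors = [
      { rowNum: 5, type: "ERROR", message: { Medium: "water not found in AQI Mediums" } },
    ];
    const remoteErrors = [
      { rowNum: 5, type: "ERROR", message: { "Observation File": "Invalid unit" } },
      { rowNum: 9, type: "ERROR", message: { "Observation File": "Missing result value" } },
    ];

    // mergeErrorMessages(localErrors, remoteErrors) then returns:
    // [
    //   { rowNum: 5, type: "ERROR", message: { Medium: "water not found in AQI Mediums",
    //                                          "Observation File": "Invalid unit" } },
    //   { rowNum: 9, type: "ERROR", message: { "Observation File": "Missing result value" } },
    // ]
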
diff --git a/backend/src/cron-job/cron-job.service.ts b/backend/src/cron-job/cron-job.service.ts
index 1b935e5f..faa8e502 100644
--- a/backend/src/cron-job/cron-job.service.ts
+++ b/backend/src/cron-job/cron-job.service.ts
@@ -433,20 +433,20 @@ export class CronJobService {
     grab all the files from the DB and S3 bucket that have a status of QUEUED
     for each file returned, change the status to INPROGRESS and go to the parser
     */
-    if (!this.dataPullDownComplete) {
-      this.logger.warn("Data pull down from AQSS did not complete");
-      return;
-    }
+    // if (!this.dataPullDownComplete) {
+    //   this.logger.warn("Data pull down from AQSS did not complete");
+    //   return;
+    // }
 
     let filesToValidate = await this.fileParser.getQueuedFiles();
 
     if (filesToValidate.length < 1) {
       console.log("************** NO FILES TO VALIDATE **************");
-      return
+      return;
     } else {
       for (const file of filesToValidate) {
         const fileBinary = await this.objectStore.getFileData(file.file_name);
-        
+
         this.fileParser.parseFile(
           fileBinary,
           file.file_name,
@@ -455,7 +455,7 @@ export class CronJobService {
         );
       }
       this.dataPullDownComplete = false;
-      return
+      return;
     }
   }
 }
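
One caveat before the validation diff below: both parseObsResultResponse above and every check in localValidation build JSON by string interpolation and then immediately JSON.parse it. That round-trip throws a SyntaxError as soon as a value contains a double quote or backslash — easy to hit with free-text AQI error messages or user-supplied cell values. A sketch of a safer, less repetitive construction (the helper is hypothetical, not part of this diff):

    // Build the finding as a plain object: no hand-quoted JSON, no parse step.
    type ErrorType = "ERROR" | "WARNING";

    const makeErrorLog = (
      rowNum: number,
      field: string,
      text: string,
      type: ErrorType = "ERROR",
    ) => ({ rowNum, type, message: { [field]: text } });

    // Each two-line `let errorLog = ...; errorLogs.push(JSON.parse(errorLog));`
    // pair below could then collapse to, e.g.:
    // errorLogs.push(makeErrorLog(index + 2, "Project", `${record.Project} not found in AQI Projects`));
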
"" && !valid) { - errorLog += `ERROR: Row ${index + 2} ${field} ${record[field]} is not a valid number\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"${field}": "${record[field]} is not valid number"}}`; + errorLogs.push(JSON.parse(errorLog)); } } }); @@ -665,10 +669,12 @@ export class FileParseValidateService { record[field], ); if (!present) { - errorLog += `ERROR: Row ${index + 2} ${field} ${record[field]} not found in AQI Units\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"${field}": "${record[field]} not found in AQI Units"}}`; + errorLogs.push(JSON.parse(errorLog)); } } else if (record.hasOwnProperty(field) && !record[field]) { - errorLog += `WARNING: Row ${index + 2} ${field} ${record[field]} is empty\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"${field}": Cannot be empty"}}`; + errorLogs.push(JSON.parse(errorLog)); } }); @@ -678,7 +684,8 @@ export class FileParseValidateService { record.Project, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Project ${record.Project} not found in AQI Projects\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Project": "${record.Project} not found in AQI Projects"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -688,7 +695,8 @@ export class FileParseValidateService { record.LocationID, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Location ID ${record.LocationID} not found in AQI Locations\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Location_ID": "${record.LocationID} not found in AQI Locations"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -698,7 +706,8 @@ export class FileParseValidateService { record.Preservative, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Preservative ${record.Preservative} not found in AQI Preservatives\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Preservative": "${record.Preservative} not found in AQI Preservatives"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -708,7 +717,8 @@ export class FileParseValidateService { record.SamplingConextTag, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Sampling Conext Tag ${record.SamplingConextTag} not found in AQI Sampling Context Tags\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Sampling_Context_Tag": "${record.SamplingConextTag} not found in AQI Sampling Context Tags"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -718,7 +728,8 @@ export class FileParseValidateService { record.CollectionMethod, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Collection Method ${record.CollectionMethod} not found in AQI Collection Methods\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Collection_Method": "${record.CollectionMethod} not found in AQI Collection Methods"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -728,7 +739,8 @@ export class FileParseValidateService { record.Medium, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Medium ${record.Medium} not found in AQI Mediums\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Medium": "${record.Medium} not found in AQI Mediums"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -738,6 +750,8 @@ export class FileParseValidateService { record.ObservedPropertyID, ); if (!present) { + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Observed_Property_ID": 
"${record.ObservedPropertyID} not found in AQI Observed Properties"}}`; + errorLogs.push(JSON.parse(errorLog)); errorLog += `ERROR: Row ${index + 2} Observed Property ID ${record.ObservedPropertyID} not found in AQI Observed Properties\n`; } } @@ -751,7 +765,8 @@ export class FileParseValidateService { record.DetectionCondition.toUpperCase().replace(/ /g, "_"), ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Detection Condition ${record.DetectionCondition} not found in AQI Detection Conditions\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Detection_Condition": "${record.DetectionCondition} not found in AQI Detection Conditions"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -761,7 +776,8 @@ export class FileParseValidateService { record.Fraction.toUpperCase(), ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Fraction ${record.Fraction} not found in AQI Sample Fractions\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Fraction": "${record.Fraction} not found in AQI Fractions"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -771,7 +787,8 @@ export class FileParseValidateService { record.DataClassification, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Data Classification ${record.DataClassification} not found in AQI Data Classifications\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Data_Classification": "${record.DataClassification} not found in AQI Data Classesifications"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -781,7 +798,8 @@ export class FileParseValidateService { record.AnalyzingAgency, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Analyzing Agency ${record.AnalyzingAgency} not found in AQI Agencies\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Analyzing_Agency": "${record.AnalyzingAgency} not found in AQI Agencies"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -791,7 +809,8 @@ export class FileParseValidateService { record.ResultStatus, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Result Status ${record.ResultStatus} not found in AQI Result Statuses\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Result_Status": "${record.ResultStatus} not found in AQI Result Statuses"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -801,7 +820,8 @@ export class FileParseValidateService { record.ResultGrade, ); if (!present) { - errorLog += `ERROR: Row ${index + 2} Result Grade ${record.ResultGrade} not found in AQI Result Grades\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Result_Grade": "${record.ResultGrade} not found in AQI Result Grades"}}`; + errorLogs.push(JSON.parse(errorLog)); } } @@ -812,7 +832,8 @@ export class FileParseValidateService { record.FieldVisitStartTime, ]); if (visitExists) { - errorLog += `ERROR: Row ${index + 2} Visit for Location ${record.LocationID} at Start Time ${record.FieldVisitStartTime} already exists in AQI Field Visits\n`; + let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": Visit for Location ${record.LocationID} at Start Time ${record.FieldVisitStartTime} already exists in AQI Field Visits}`; + errorLogs.push(JSON.parse(errorLog)); } // check if the activity already exits -- check if the activity name for that given visit and location already exists @@ -821,7 +842,8 @@ export class FileParseValidateService { [record.ActivityName, record.FieldVisitStartTime, record.LocationID], ); if 
@@ -832,10 +854,9 @@ export class FileParseValidateService {
         record.LocationID,
       ]);
       if (specimenExists) {
-        errorLog += `ERROR: Row ${index + 2} Specimen Name for that Acitivity at Start Time ${record.ObservedDateTime} already exists in AQI Specimens\n`;
+        let errorLog = `{"rowNum": ${index + 2}, "type": "ERROR", "message": {"Specimen": "Specimen Name ${record.SpecimenName} for that Activity at Start Time ${record.ObservedDateTime} already exists in AQI Specimens"}}`;
+        errorLogs.push(JSON.parse(errorLog));
       }
-
-      errorLog += "\n";
     }
 
     // Do a dry run of the observations
@@ -845,7 +866,7 @@ export class FileParseValidateService {
     );
 
     const finalErrorLog = this.aqiService.mergeErrorMessages(
-      errorLog,
+      errorLogs,
       observationsErrors,
     );
 
@@ -926,27 +947,36 @@ export class FileParseValidateService {
        * Do the local validation for each section here - if passed then go to the API calls - else create the message/file/email for the errors
        */
-      await this.fileSubmissionsService.updateFileStatus(
-        file_submission_id,
-        "INPROGRESS",
-      );
+      // await this.fileSubmissionsService.updateFileStatus(
+      //   file_submission_id,
+      //   "INPROGRESS",
+      // );
 
-      const localValidationResults = this.localValidation(
+      const localValidationResults = await this.localValidation(
         allRecords,
         ObsFilePath,
       );
 
-      if ((await localValidationResults).includes("ERROR")) {
+      if (localValidationResults.some((item) => item.type === "ERROR")) {
         /*
          * Set the file status to 'REJECTED'
-         * Create the error log file here
+         * Save the error logs to the database table
          * Send an email to the submitter and the ministry contact that is inside the file
          */
         await this.fileSubmissionsService.updateFileStatus(
           file_submission_id,
           "REJECTED",
         );
-        console.log(await localValidationResults);
+
+        const file_error_log_data = {
+          file_submission_id: file_submission_id,
+          file_name: fileName,
+          error_log: localValidationResults,
+        };
+
+        await this.prisma.file_error_logs.create({
+          data: file_error_log_data,
+        });
 
         return;
-      } else if (!(await localValidationResults).includes("ERROR")) {
+      } else {
diff --git a/migrations/sql/V1.1.0__file_error_logs.sql b/migrations/sql/V1.1.0__file_error_logs.sql
new file mode 100644
index 00000000..f0d366de
--- /dev/null
+++ b/migrations/sql/V1.1.0__file_error_logs.sql
@@ -0,0 +1,7 @@
+CREATE TABLE IF NOT EXISTS enmods.file_error_logs (
+    file_error_log_id UUID PRIMARY KEY NOT NULL DEFAULT gen_random_uuid(),
+    file_submission_id UUID NULL,
+    file_name varchar(200) NULL,
+    error_log JSONB NULL,
+    CONSTRAINT file_submission_id_fk FOREIGN KEY(file_submission_id) REFERENCES enmods.file_submission(submission_id)
+);
\ No newline at end of file
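
Finally, a sketch of how the stored log could be read back for display to the submitter, assuming the regenerated Prisma client (the surrounding service or endpoint is not part of this diff):

    import { PrismaClient } from "@prisma/client";

    const prisma = new PrismaClient();

    // Fetch every error-log row recorded for one file submission.
    async function getErrorLogs(fileSubmissionId: string) {
      return prisma.file_error_logs.findMany({
        where: { file_submission_id: fileSubmissionId },
        select: { file_name: true, error_log: true },
      });
    }
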