diff --git a/.gitignore b/.gitignore index 3e5f06c34..3af96fa00 100644 --- a/.gitignore +++ b/.gitignore @@ -100,3 +100,7 @@ bld/ MCS.FOI.S3FileConversion/MCS.FOI.S3FileConversion/QtBinariesWindows/ computingservices/ZippingServices/env/* openshift/templates/zippingservice/zipper.env +*.locenv + +MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/result_*.pdf +MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/SourceFiles/result_*.pdf diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDF/CalendarFileProcessor.cs b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDF/CalendarFileProcessor.cs index fdf2d03c7..079fb37be 100644 --- a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDF/CalendarFileProcessor.cs +++ b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDF/CalendarFileProcessor.cs @@ -147,14 +147,22 @@ public CalendarFileProcessor(Stream sourceStream) string organizer = string.Empty; //Organizer Name and Email - if (e.Organizer != null) + if (e.Organizer?.Value != null) { - organizer = e.Organizer.CommonName + "(" + e.Organizer.Value.AbsoluteUri + ")"; + try + { + organizer = e.Organizer?.CommonName + "(" + e.Organizer?.Value.AbsoluteUri + ")"; + } + catch + { + + organizer = @"Unknown Organizer"; + } } else { - organizer = @"Unknown Organizer(mailto:unknownorganizer@calendar.google.com)"; + organizer = @"Unknown Organizer(mailto:unknownorganizer@calendar.bcgov.ca)"; } htmlString.Append(@" From: @@ -174,7 +182,7 @@ public CalendarFileProcessor(Stream sourceStream) //Meeting created timestamp htmlString.Append(@" Sent: - " + e.DtStamp.Date + ""); + " + e.DtStamp.Value + ""); //Priority htmlString.Append(@" @@ -184,12 +192,12 @@ public CalendarFileProcessor(Stream sourceStream) //Meeting Start Timestamp htmlString.Append(@" Start Time: - " + e.DtStart.Date + ""); + " + e.DtStart.Value + ""); //Meeting End Timestamp htmlString.Append(@" End Time: - " + e.DtEnd.Date + ""); + " + e.DtEnd.Value + ""); //Meeting Message string message = @"" + e.Description?.Replace("\n", "
"); message = message.Replace("<br>", "
").Replace("<br/>", "
"); @@ -243,6 +251,22 @@ public CalendarFileProcessor(Stream sourceStream) } + + private DateTime GetPSTTime(DateTime _timetoconvert) + { + DateTime converteddate = _timetoconvert; + if (TimeZone.CurrentTimeZone.StandardName != "Pacific Standard Time" || _timetoconvert.Kind == DateTimeKind.Utc ) + { + + converteddate = TimeZoneInfo.ConvertTimeBySystemTimeZoneId(converteddate, "Pacific Standard Time"); + + } + + return converteddate; + } + + + /// /// Converts HTML string to PDF using syncfution library and blink engine /// diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/CalendarFileProcessorTest.cs b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/CalendarFileProcessorTest.cs index e457a0e70..76ec57c30 100644 --- a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/CalendarFileProcessorTest.cs +++ b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/CalendarFileProcessorTest.cs @@ -42,7 +42,7 @@ public void ProcessSimpleCalendarFilesTest() Dictionary> attachments = new Dictionary>(); string rootFolder = getSourceFolder(); Stream output = new MemoryStream(); - Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "test-cal.ics"), FileMode.Open, FileAccess.Read); + Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "Backlog refinement.ics"), FileMode.Open, FileAccess.Read); CalendarFileProcessor calendarFileProcessor = new CalendarFileProcessor(testFile); calendarFileProcessor.WaitTimeinMilliSeconds = 5000; @@ -50,6 +50,8 @@ public void ProcessSimpleCalendarFilesTest() string outputPath = Path.Combine(getSourceFolder(), "output"); (isProcessed, message, output, attachments) = calendarFileProcessor.ProcessCalendarFiles(); Assert.IsTrue(isProcessed == true, $"Calendar to PDF Conversion failed"); + + SaveStreamAsFile(getSourceFolder(), output, "result_Backlog refinement.pdf"); } [TestMethod] @@ -60,7 +62,7 @@ public void ProcessCalendarFileWithAttachmentsTest() Dictionary> attachments = new Dictionary>(); string rootFolder = getSourceFolder(); Stream output = new MemoryStream(); - Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "test-with-attachments.ics"), FileMode.Open, FileAccess.Read); + Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "Backlog refinement.ics"), FileMode.Open, FileAccess.Read); CalendarFileProcessor calendarFileProcessor = new CalendarFileProcessor(testFile); calendarFileProcessor.WaitTimeinMilliSeconds = 5000; @@ -71,6 +73,8 @@ public void ProcessCalendarFileWithAttachmentsTest() bool isAttachmentsExists = attachments.Count == 2; Assert.IsTrue(isAttachmentsExists, $"Attachments not found"); + + SaveStreamAsFile(getSourceFolder(), output, "result_Backlog refinement.pdf"); } [TestMethod] @@ -81,18 +85,34 @@ public void ProcessComplexCalendarFilesTest() Dictionary> attachments = new Dictionary>(); string rootFolder = getSourceFolder(); Stream output = new MemoryStream(); - Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "test-problematic-calendar.ics"), FileMode.Open, FileAccess.Read); + Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "test-with-attachments.ics"), FileMode.Open, FileAccess.Read); CalendarFileProcessor calendarFileProcessor = new CalendarFileProcessor(testFile); calendarFileProcessor.WaitTimeinMilliSeconds = 5000; calendarFileProcessor.FailureAttemptCount = 10; (isProcessed, message, output, attachments) = calendarFileProcessor.ProcessCalendarFiles(); Assert.IsTrue(isProcessed == true, $"Calendar to PDF Conversion failed"); + + 
SaveStreamAsFile(getSourceFolder(), output, "result_test-with-attachmentsr.pdf"); } private string getSourceFolder() { - return "C:\\Projects\\foi-docreviewer\\MCS.FOI.S3FileConversion\\MCS.FOI.CalendarToPDFUnitTests\\SourceFiles"; + return "C:\\AOT\\FOI\\Source\\foi-docreviewer\\foi-docreviewer\\MCS.FOI.S3FileConversion\\MCS.FOI.CalendarToPDFUnitTests\\SourceFiles"; + } + + public static void SaveStreamAsFile(string filePath, Stream stream, string fileName) + { + stream.Position = 0; + var path = Path.Combine(filePath, fileName); + var bytesInStream = new byte[stream.Length]; + + stream.Read(bytesInStream, 0, (int)bytesInStream.Length); + + using (var outputFileStream = new FileStream(path, FileMode.Create)) + { + outputFileStream.Write(bytesInStream, 0, bytesInStream.Length); + } } } } diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/SourceFiles/Backlog refinement.ics b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/SourceFiles/Backlog refinement.ics new file mode 100644 index 000000000..193ce4f28 --- /dev/null +++ b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/SourceFiles/Backlog refinement.ics @@ -0,0 +1,226 @@ +BEGIN:VCALENDAR +PRODID:-//Microsoft Corporation//Outlook 16.0 MIMEDIR//EN +VERSION:2.0 +METHOD:REQUEST +X-MS-OLK-FORCEINSPECTOROPEN:TRUE +BEGIN:VTIMEZONE +TZID:Pacific Standard Time +BEGIN:STANDARD +DTSTART:16011104T020000 +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=11 +TZOFFSETFROM:-0700 +TZOFFSETTO:-0800 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:16010311T020000 +RRULE:FREQ=YEARLY;BYDAY=2SU;BYMONTH=3 +TZOFFSETFROM:-0800 +TZOFFSETTO:-0700 +END:DAYLIGHT +END:VTIMEZONE +BEGIN:VEVENT +ATTENDEE;CN="Abin Antony";ROLE=OPT-PARTICIPANT;RSVP=TRUE:mailto:abin.antony + @aot-technologies.com +ATTENDEE;CN="Antony, Abin CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invalid:n + omail +ATTENDEE;CN=adam.coard@aot-technologies.com;ROLE=OPT-PARTICIPANT;RSVP=TRUE: + mailto:adam.coard@aot-technologies.com +ATTENDEE;CN="Andrews, Arielle CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:inval + id:nomail +ATTENDEE;CN="Balachandran, Vineet CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:i + nvalid:nomail +ATTENDEE;CN="Coard, Adam CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invalid:no + mail +ATTENDEE;CN="Divya Viswanath";ROLE=OPT-PARTICIPANT;RSVP=TRUE:mailto:divya.v + @aot-technologies.com +ATTENDEE;CN="Jacklyn Harrietha";ROLE=OPT-PARTICIPANT;RSVP=TRUE:mailto:jackl + yn.harrietha@aot-technologies.com +ATTENDEE;CN="Harrietha, Jacklyn CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:inv + alid:nomail +ATTENDEE;CN="Mullane, Loren CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invalid + :nomail +ATTENDEE;CN="Prodan, Matthew CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invali + d:nomail +ATTENDEE;CN="Qi, Richard CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invalid:no + mail +ATTENDEE;CN='richard.qi@aot-technologies.com';ROLE=OPT-PARTICIPANT;RSVP=TRU + E:mailto:richard.qi@aot-technologies.com +ATTENDEE;CN=sumathi.thirumani@aot-technologies.com;ROLE=OPT-PARTICIPANT;RSV + P=TRUE:mailto:sumathi.thirumani@aot-technologies.com +ATTENDEE;CN="Thirumani, Sumathi CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:inv + alid:nomail +ATTENDEE;CN='vineet.balachandran@aot-technologies.com';ROLE=OPT-PARTICIPANT + ;RSVP=TRUE:mailto:vineet.balachandran@aot-technologies.com +ATTENDEE;CN="Viswanath, Divya CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:inval + id:nomail +ATTENDEE;CN="Aparna S";ROLE=OPT-PARTICIPANT;RSVP=TRUE:mailto:aparna.s@aot-t + echnologies.com +CLASS:PUBLIC +CREATED:20240918T224519Z +DESCRIPTION:Moving back to 2x1 hr sessions based on recent team 
patterns + – Can extend/remove meetings as \n\n \n\nI’m extending this invite to + everyone\, however the process we’ve agreed to trial for refinement goin + g forward is: \n\n1. Stories are written and shared with the team minimum + 48 hours in advance\n2. All members of the team review stories in their ow + n time\, add questions or discussion points within the card\n3. Team to de + termine who will attend (the technical members of the team in particular h + ave discussed rotating attendance\, and filling each other in during techn + ical standups and sprint planning)\n\n \n\nAs is our practice\, we will ch + eck back if this approach is working after a couple of sprints\n\n________ + ________________________________________________________________________ \ + n\nMicrosoft Teams meeting \n\nJoin on your computer or mobile app \n\nCli + ck here to join the meeting \n\nOr call in (audio + only) \n\n+1 778-401-6289\,\,21501988# + Canada\, Victoria \n\nPhone Conference ID: 215 019 88# \n\nFind a local n + umber | Reset PIN \n\nLearn More | Meeting options < + https://teams.microsoft.com/meetingOptions/?organizerId=adaba6c5-1521-4b0d + -8e1e-11a9bfe1c924&tenantId=6fdb5200-3d0d-4a8a-b036-d3685e359adc&threadId= + 19_meeting_Y2M4ZDFjYmItZDkzOC00MWFmLThhMzYtN2UxN2EwZWZmOGUz@thread.v2&mess + ageId=0&language=en-US> \n\n_____________________________________________ + ___________________________________ \n\n \n\n +DTEND;TZID="Pacific Standard Time":20240917T103000 +DTSTAMP:20211201T212305Z +DTSTART;TZID="Pacific Standard Time":20240917T093000 +LAST-MODIFIED:20240918T224519Z +LOCATION:Microsoft Teams Meeting +ORGANIZER;CN="Pilchar, Molly CITZ:EX":invalid:nomail +PRIORITY:5 +RECURRENCE-ID;TZID="Pacific Standard Time":20240917T093000 +SEQUENCE:210 +SUMMARY;LANGUAGE=en-us:Backlog refinement +TRANSP:OPAQUE +UID:040000008200E00074C5B7101A82E00800000000309A43BC1A3DD701000000000000000 + 01000000099BD2671DADE4B4CAF83DA04954B3577 +X-ALT-DESC;FMTTYPE=text/html:\n

Moving back to 2x1 hr sessions based on recent team patterns + –\; Can extend/remove meetings as

 \;

I’\;m extending this invi + te to everyone\, however the process we’\;ve agreed to trial for refi + nement going forward is:

  1. Stories are written and shared with the team minimum 48 ho + urs in advance
  2. All members of the team review stories i + n their own time\, add questions or discussion points within the card +
  3. Team to determine who will attend (the technical members of + the team in particular have discussed rotating attendance\, and filling e + ach other in during technical standups and sprint planning)

 \;

As i + s our practice\, we will check back if this approach is working after a co + uple of sprints

________________________________________________________________ + ________________

Micr + osoft Teams meeting

______________ + __________________________________________________________________ +

 \;

+X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-DISALLOW-COUNTER:FALSE +X-MS-OLK-APPTSEQTIME:20211013T002428Z +BEGIN:VALARM +TRIGGER:-PT15M +ACTION:DISPLAY +DESCRIPTION:Reminder +END:VALARM +END:VEVENT +END:VCALENDAR diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDF/MSGFileProcessor.cs b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDF/MSGFileProcessor.cs index 5e6c443f0..5f4c8e42e 100644 --- a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDF/MSGFileProcessor.cs +++ b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDF/MSGFileProcessor.cs @@ -57,15 +57,25 @@ public MSGFileProcessor(Stream sourceStream) var _attachment = (Storage.Message)attachment; var filename = _attachment.FileName; var extension = Path.GetExtension(filename); + var baseFilename = Path.GetFileNameWithoutExtension(filename); if (!string.IsNullOrEmpty(extension)) { _attachment.Save(attachmentStream); Dictionary attachmentInfo = new Dictionary(); - + + // If the filename already exists, increment the duplicate count to create a unique filename if (fileNameHash.ContainsKey(filename)) { - - filename = Path.GetFileNameWithoutExtension(filename) + '1' + extension; + int duplicateCount = 1; // Initialize the duplicate count + string newFilename; + + // Loop until a unique filename is found + do + { + newFilename = baseFilename + duplicateCount.ToString() + extension; + duplicateCount++; + } while (fileNameHash.ContainsKey(newFilename)); + filename = newFilename; } fileNameHash.Add(filename, true); @@ -73,7 +83,7 @@ public MSGFileProcessor(Stream sourceStream) var sentOn = _attachment.SentOn.ToString(); if (!string.IsNullOrEmpty(sentOn)) lastModified = sentOn; - + var attachmentSize = attachmentStream.Length.ToString(); if (string.IsNullOrEmpty(attachmentSize)) attachmentSize = attachmentStream.Capacity.ToString(); @@ -91,19 +101,27 @@ public MSGFileProcessor(Stream sourceStream) var _attachment = (Storage.Attachment)attachment; var filename = _attachment.FileName; var extension = Path.GetExtension(filename); - + var baseFilename = Path.GetFileNameWithoutExtension(filename); if (!string.IsNullOrEmpty(extension)) { attachmentStream.Write(_attachment.Data, 0, _attachment.Data.Length); Dictionary attachmentInfo = new Dictionary(); - + + // If the filename already exists, increment the duplicate count to create a unique filename if (fileNameHash.ContainsKey(filename)) { - - filename = Path.GetFileNameWithoutExtension(filename) + '1' + extension; + int duplicateCount = 1; // Initialize the duplicate count + string newFilename; + // Loop until a unique filename is found + do + { + newFilename = baseFilename + '-' +duplicateCount.ToString() + extension; + duplicateCount++; + } while (fileNameHash.ContainsKey(newFilename)); + filename = newFilename; // Set the unique filename } fileNameHash.Add(filename, true); - attachmentInfo.Add("filename", _attachment.FileName); + attachmentInfo.Add("filename", filename); attachmentInfo.Add("s3filename", filename); attachmentInfo.Add("cid", _attachment.ContentId); attachmentInfo.Add("size", _attachment.Data.Length.ToString()); @@ -153,7 +171,21 @@ public MSGFileProcessor(Stream sourceStream) } } var startAt = 0; - foreach (var inlineAttachment in inlineAttachments.OrderBy(m => m.GetType().GetProperty("RenderingPosition").GetValue(m, null))) + foreach (var inlineAttachment in inlineAttachments.OrderBy(m => + { + int pos = (int)m.GetType().GetProperty("RenderingPosition").GetValue(m, null); + if (pos > -1) + { + return pos; + } + else + { + var 
_inlineAttachment = (Storage.Attachment)m; + Regex regex = new Regex(@""); + Match match = regex.Match(bodyreplaced, startAt); + return match.Index; + } + })) { if (rtfInline) { @@ -185,7 +217,7 @@ public MSGFileProcessor(Stream sourceStream) else if (htmlInline) { var _inlineAttachment = (Storage.Attachment)inlineAttachment; - Regex regex = new Regex(""); + Regex regex = new Regex(@""); Match match = regex.Match(bodyreplaced, startAt); if (match.Success) { @@ -198,27 +230,27 @@ public MSGFileProcessor(Stream sourceStream) if (width > maxSize && width >= height) { float scale = maxSize / width; - width = (int) (width * scale); - height = (int) (height * scale); + width = (int)(width * scale); + height = (int)(height * scale); } if (height > maxSize) { float scale = maxSize / height; - width = (int) (width * scale); - height = (int) (height * scale); + width = (int)(width * scale); + height = (int)(height * scale); } string widthString = string.Empty; string heightString = string.Empty; if (width > 0) { - widthString = " width =\"" + width +"\""; + widthString = " width =\"" + width + "\""; } if (height > 0) { heightString = " height =\"" + height + "\""; } - string imgReplacementString = ""; - bodyreplaced = regex.Replace(bodyreplaced, imgReplacementString, 1, startAt); + string imgReplacementString = ""; + bodyreplaced = regex.Replace(bodyreplaced, imgReplacementString, Int32.MaxValue, startAt); startAt = match.Index + imgReplacementString.Length; } foreach (KeyValuePair> attachment in attachmentsObj) @@ -241,7 +273,7 @@ public MSGFileProcessor(Stream sourceStream) if (!string.IsNullOrEmpty(attachmentsList)) { - htmlString += (@" + htmlString += (@" Attachments: " + attachmentsList.Remove(attachmentsList.Length - 2, 2) + ""); } @@ -251,7 +283,8 @@ public MSGFileProcessor(Stream sourceStream) if (bodyreplaced.Substring(0, 4) == ""); bodyreplaced = bodyreplaced.Insert(bodyStart.Index + bodyStart.Length, htmlString); @@ -505,12 +538,24 @@ private string GenerateHtmlfromMsg(Storage.Message msg) Subject: " + msg.Subject + ""); + DateTime sentDate = Convert.ToDateTime(msg.SentOn); + if(sentDate == DateTime.MinValue) + { + sentDate = Convert.ToDateTime(msg.CreationTime); + } + if (TimeZone.CurrentTimeZone.StandardName != "Pacific Standard Time") + { + + sentDate = TimeZoneInfo.ConvertTimeBySystemTimeZoneId(sentDate, "Pacific Standard Time"); + + } + //Message Sent On timestamp htmlString.Append(@" Sent: - " + msg.SentOn + ""); + " + sentDate + ""); + - //Message body //string message = @"" + msg.BodyText?.Replace("\n", "").Replace("<br>", "")?.Replace("<br/>", ""); diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs index 1e9848bc6..d4de41a4b 100644 --- a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs +++ b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs @@ -50,6 +50,8 @@ public void ProcessSimpleMSGFilesTest() msgFileProcessor.FailureAttemptCount = 10; (converted, message, output, attachments) = msgFileProcessor.ConvertToPDF(); Assert.IsTrue(converted == true, $"MSG to PDF Conversion failed for {testFile}"); + + SaveStreamAsFile(getSourceFolder(), output, "result_simple-test-msg-file.pdf"); } [TestMethod] @@ -68,7 +70,7 @@ public void ProcessMSGFileWithAttachmentsTest() (converted, message, output, attachments) = msgFileProcessor.ConvertToPDF(); Assert.IsTrue(converted == true, $"MSG to PDF Conversion failed for {testFile}"); - 
SaveStreamAsFile(getSourceFolder(), output, "result.pdf"); + SaveStreamAsFile(getSourceFolder(), output, "result_Test-MSG-File-with-Attachments.pdf"); bool isAttachmentsExists = attachments.Count == 3; Assert.IsTrue(isAttachmentsExists, $"MSG PDF file does not exists {testFile}"); diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/Test-MSG-File-with-Attachments.pdf b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/Test-MSG-File-with-Attachments.pdf deleted file mode 100644 index 43ef48bfa..000000000 Binary files a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/Test-MSG-File-with-Attachments.pdf and /dev/null differ diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/simple-test-msg-file.pdf b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/simple-test-msg-file.pdf deleted file mode 100644 index 36d63a55b..000000000 Binary files a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/simple-test-msg-file.pdf and /dev/null differ diff --git a/api/migrations/versions/9d45ce57481e_.py b/api/migrations/versions/9d45ce57481e_.py new file mode 100644 index 000000000..03e94fea5 --- /dev/null +++ b/api/migrations/versions/9d45ce57481e_.py @@ -0,0 +1,35 @@ +"""empty message + +Revision ID: 9d45ce57481e +Revises: 18a45d1b33cc +Create Date: 2024-06-06 10:19:45.739225 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '9d45ce57481e' +down_revision = '18a45d1b33cc' +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table('PDFStitchJobAttributes', + sa.Column('attributesid', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('pdfstitchjobid', sa.Integer(), nullable=False), + sa.Column('version', sa.Integer(), nullable=False), + sa.Column('ministryrequestid', sa.Integer(), nullable=False), + sa.Column('attributes', postgresql.JSON(astext_type=sa.Text()), nullable=False), + sa.Column('createdat', sa.TIMESTAMP, nullable=False, server_default=sa.func.now()), + sa.Column('createdby', sa.String(length=120), nullable=True), + sa.PrimaryKeyConstraint('attributesid'), + sa.ForeignKeyConstraint(['pdfstitchjobid', 'version'], ['PDFStitchJob.pdfstitchjobid', 'PDFStitchJob.version'], ) + ) + + +def downgrade(): + op.drop_table('PDFStitchJobAttributes') + diff --git a/api/reviewer_api/models/DeduplicationJob.py b/api/reviewer_api/models/DeduplicationJob.py index 81699e690..0258ed6c0 100644 --- a/api/reviewer_api/models/DeduplicationJob.py +++ b/api/reviewer_api/models/DeduplicationJob.py @@ -64,12 +64,12 @@ def getdedupestatus(cls, ministryrequestid): executions = [] try: sql = """select distinct on (deduplicationjobid) deduplicationjobid, version, - filename, status, documentmasterid, trigger + filename, status, documentmasterid, trigger, message from "DeduplicationJob" fcj where ministryrequestid = :ministryrequestid order by deduplicationjobid, "version" desc""" rs = db.session.execute(text(sql), {'ministryrequestid': ministryrequestid}) for row in rs: - executions.append({"deduplicationjobid": row["deduplicationjobid"], "version": row["version"], "filename": row["filename"], "status": row["status"], "documentmasterid": row["documentmasterid"], "trigger":row["trigger"]}) + executions.append({"deduplicationjobid": row["deduplicationjobid"], "version": row["version"], "filename": row["filename"], "status": row["status"], "documentmasterid": row["documentmasterid"], 
"trigger":row["trigger"], "message": row["message"]}) except Exception as ex: logging.error(ex) db.session.close() diff --git a/api/reviewer_api/models/PDFStitchJobAttributes.py b/api/reviewer_api/models/PDFStitchJobAttributes.py new file mode 100644 index 000000000..899a7199a --- /dev/null +++ b/api/reviewer_api/models/PDFStitchJobAttributes.py @@ -0,0 +1,66 @@ +from .db import db, ma +from datetime import datetime as datetime2 +from sqlalchemy.dialects.postgresql import JSON +from sqlalchemy import func, and_ +from .default_method_result import DefaultMethodResult +from .DocumentDeleted import DocumentDeleted +from .DocumentMaster import DocumentMaster +import logging + + +class PDFStitchJobAttributes(db.Model): + __tablename__ = "PDFStitchJobAttributes" + # Defining the columns + attributesid = db.Column(db.Integer, primary_key=True, autoincrement=True) + pdfstitchjobid = db.Column(db.Integer, db.ForeignKey("PDFStitchJob.pdfstitchjobid")) + version = db.Column(db.Integer, db.ForeignKey("PDFStitchJob.version")) + ministryrequestid = db.Column(db.Integer, nullable=False) + attributes = db.Column(JSON, unique=False, nullable=False) + createdat = db.Column(db.DateTime, default=datetime2.now, nullable=False) + createdby = db.Column(db.String(120), nullable=False) + + + @classmethod + def insert(cls, row): + try: + db.session.add(row) + db.session.commit() + return DefaultMethodResult( + True, + "PDF Stitch Job Attributes recorded for ministryrequestid: {0}".format( + row.ministryrequestid + ), + row.pdfstitchjobid, + ) + except Exception as ex: + logging.error(ex) + finally: + db.session.close() + + @classmethod + def getpdfstitchjobattributesbyid(cls, requestid): + try: + pdfstitchjobattributesschema = PDFStitchJobAttributesSchema(many=False) + query = db.session.query(PDFStitchJobAttributes).filter( + PDFStitchJobAttributes.ministryrequestid == requestid + ).first() + return pdfstitchjobattributesschema.dump(query) + except Exception as ex: + logging.error(ex) + finally: + db.session.close() + + + + +class PDFStitchJobAttributesSchema(ma.Schema): + class Meta: + fields = ( + "attributesid", + "pdfstitchjobid", + "version", + "ministryrequestid", + "attributes", + "createdat", + "createdby", + ) diff --git a/api/reviewer_api/resources/document.py b/api/reviewer_api/resources/document.py index 2cd9b7d8c..9aba5559f 100644 --- a/api/reviewer_api/resources/document.py +++ b/api/reviewer_api/resources/document.py @@ -29,6 +29,7 @@ from reviewer_api.services.documentservice import documentservice from reviewer_api.services.docdeletedpageservice import docdeletedpageservice +from reviewer_api.services.jobrecordservice import jobrecordservice API = Namespace('Document Services', description='Endpoints for deleting and replacing documents') TRACER = Tracer.get_instance() @@ -122,10 +123,17 @@ def get(requestid): response.raise_for_status() # get request status jsonobj = response.json() + balancefeeoverrodforrequest = jobrecordservice().isbalancefeeoverrodforrequest(requestid) + outstandingbalance=0 + if 'cfrfee' in jsonobj and 'feedata' in jsonobj['cfrfee'] and "balanceDue" in jsonobj['cfrfee']['feedata']: + outstandingbalancestr = jsonobj['cfrfee']['feedata']["balanceDue"] + outstandingbalance = float(outstandingbalancestr) requestinfo = { "bcgovcode": jsonobj["bcgovcode"], "requesttype": jsonobj["requestType"], "validoipcreviewlayer": documentservice().validate_oipcreviewlayer(jsonobj, requestid), + "outstandingbalance": outstandingbalance, + "balancefeeoverrodforrequest": 
balancefeeoverrodforrequest } documentdivisionslist,result = documentservice().getdocuments(requestid, requestinfo["bcgovcode"]) return json.dumps({"requeststatuslabel": jsonobj["requeststatuslabel"], "documents": result, "requestnumber":jsonobj["axisRequestId"], "requestinfo":requestinfo, "documentdivisions":documentdivisionslist}), 200 diff --git a/api/reviewer_api/resources/foiflowmasterdata.py b/api/reviewer_api/resources/foiflowmasterdata.py index c42af161b..1d50eb851 100644 --- a/api/reviewer_api/resources/foiflowmasterdata.py +++ b/api/reviewer_api/resources/foiflowmasterdata.py @@ -172,7 +172,8 @@ class FOIFlowS3PresignedRedline(Resource): def post(ministryrequestid, redactionlayer="redline", layertype="redline"): try: data = request.get_json() - requesttype = data["divdocumentList"] + # print("data!:",data) + requesttype = data["requestType"] documentmapper = redactionservice().getdocumentmapper( data["divdocumentList"][0]["documentlist"][0]["filepath"].split("/")[3] ) @@ -199,17 +200,27 @@ def post(ministryrequestid, redactionlayer="redline", layertype="redline"): packagetype = "redline" if redactionlayer == "oipc": packagetype = "oipcreview" if layertype == "oipcreview" else "oipcredline" - + if layertype == "consult": + packagetype = "consult" + + #check if is single redline package + is_single_redline = is_single_redline_package(_bcgovcode, packagetype, requesttype) + # print("is_single_redline:",is_single_redline) + #print("divdocumentList:",data["divdocumentList"]) for div in data["divdocumentList"]: if len(div["documentlist"]) > 0: + # print("filepathlist:" , div["documentlist"][0]["filepath"]) filepathlist = div["documentlist"][0]["filepath"].split("/")[4:] - if is_single_redline_package(_bcgovcode, packagetype, requesttype) == False: + if is_single_redline == False: division_name = div["divisionname"] # generate save url for stitched file filepath_put = "{0}/{2}/{1}/{0} - {2} - {1}.pdf".format( filepathlist[0], division_name, packagetype ) - + if packagetype == "consult": + filepath_put = "{0}/{2}/{2} - {1} - {0}.pdf".format( + filepathlist[0], division_name, 'Consult' + ) s3path_save = s3client.generate_presigned_url( ClientMethod="get_object", Params={ @@ -261,14 +272,17 @@ def post(ministryrequestid, redactionlayer="redline", layertype="redline"): ) elif len(div["incompatableList"]) > 0: filepathlist = div["incompatableList"][0]["filepath"].split("/")[4:] - if is_single_redline_package(_bcgovcode, packagetype, requesttype) and singlepkgpath is None : + if is_single_redline and singlepkgpath is None : if len(div["documentlist"]) > 0 or len(div["incompatableList"]) > 0: div = data["divdocumentList"][0] filepathlist = div["documentlist"][0]["filepath"].split("/")[4:] + # print("filepathlist:",filepathlist) filename = filepathlist[0] + # print("filename1:",filename) filepath_put = "{0}/{2}/{1}-Redline.pdf".format( filepathlist[0],filename, packagetype ) + # print("filepath_put:",filepath_put) s3path_save = s3client.generate_presigned_url( ClientMethod="get_object", Params={ @@ -279,10 +293,11 @@ def post(ministryrequestid, redactionlayer="redline", layertype="redline"): ExpiresIn=3600, HttpMethod="PUT", ) + # print("s3path_save:",s3path_save) singlepkgpath = s3path_save data["s3path_save"] = s3path_save - if is_single_redline_package(_bcgovcode, packagetype, requesttype): + if is_single_redline: for div in data["divdocumentList"]: if len(div["documentlist"]) > 0: documentlist_copy = div["documentlist"][:] @@ -301,7 +316,7 @@ def post(ministryrequestid, 
redactionlayer="redline", layertype="redline"): data["requestnumber"] = filepathlist[0] data["bcgovcode"] = _bcgovcode - data["issingleredlinepackage"] = "Y" if is_single_redline_package(_bcgovcode, packagetype, requesttype) else "N" + data["issingleredlinepackage"] = "Y" if is_single_redline else "N" return json.dumps(data), 200 except BusinessException as exception: return {"status": exception.status_code, "message": exception.message}, 500 diff --git a/api/reviewer_api/resources/redaction.py b/api/reviewer_api/resources/redaction.py index 4b633b1f3..0e1f20195 100644 --- a/api/reviewer_api/resources/redaction.py +++ b/api/reviewer_api/resources/redaction.py @@ -231,7 +231,7 @@ class AnnotationMetadata(Resource): @staticmethod @TRACER.trace() @cross_origin(origins=allowedorigins()) - @auth.require + #@auth.require def get(ministryrequestid, redactionlayer): try: result = redactionservice().getannotationinfobyrequest(ministryrequestid, redactionlayer) @@ -342,7 +342,7 @@ def post(): try: requestjson = request.get_json() print("\nrequestjson:",requestjson) - if(requestjson['bcgovcode'] == "mcf"): + if(requestjson['bcgovcode'] == "mcf" and requestjson['requesttype'] == "personal"): finalpackageschema = MCFFinalPackageSchema().load(requestjson) else: finalpackageschema = FinalPackageSchema().load(requestjson) diff --git a/api/reviewer_api/schemas/finalpackage.py b/api/reviewer_api/schemas/finalpackage.py index 94f5f0b4b..bfb334589 100644 --- a/api/reviewer_api/schemas/finalpackage.py +++ b/api/reviewer_api/schemas/finalpackage.py @@ -10,6 +10,8 @@ class FileSchema(Schema): class AttributeSchema(Schema): files = fields.Nested(FileSchema, many=True, required=True, allow_none=False) +class FeeOverrideSchema(Schema): + feeoverridereason = fields.Str(data_key="feeoverridereason", allow_none=True) class SummaryPkgSchema(Schema): divisionid = fields.Int(data_key="divisionid", allow_none=True) @@ -30,6 +32,9 @@ class FinalPackageSchema(Schema): ) summarydocuments = fields.Nested(SummarySchema, allow_none=True) redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False) + pdfstitchjobattributes = fields.Nested(FeeOverrideSchema, allow_none=True, many=False) + requesttype = fields.Str(data_key="requesttype", allow_none=False) + pdfstitchjobattributes = fields.Nested(FeeOverrideSchema, allow_none=True, many=False) class SummaryRecordSchema(Schema): recordname = fields.Str(data_key="recordname", allow_none=True) @@ -53,4 +58,6 @@ class MCFFinalPackageSchema(Schema): AttributeSchema, many=True, required=True, allow_none=False ) summarydocuments = fields.Nested(MCFSummarySchema, allow_none=True) - redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False) \ No newline at end of file + redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False) + pdfstitchjobattributes = fields.Nested(FeeOverrideSchema, allow_none=True, many=False) + requesttype = fields.Str(data_key="requesttype", allow_none=False) diff --git a/api/reviewer_api/schemas/redline.py b/api/reviewer_api/schemas/redline.py index 27db2aa60..bdd484b3d 100644 --- a/api/reviewer_api/schemas/redline.py +++ b/api/reviewer_api/schemas/redline.py @@ -29,4 +29,5 @@ class RedlineSchema(Schema): AttributeSchema, many=True, required=True, allow_none=False ) summarydocuments = fields.Nested(SummarySchema, allow_none=True) - redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False) \ No newline at end of file + redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False) + 
requesttype = fields.Str(data_key="requesttype", allow_none=False) \ No newline at end of file diff --git a/api/reviewer_api/services/documentservice.py b/api/reviewer_api/services/documentservice.py index 33170c4ba..c11dd6db9 100644 --- a/api/reviewer_api/services/documentservice.py +++ b/api/reviewer_api/services/documentservice.py @@ -251,6 +251,7 @@ def __updatededupestatus(self, dedupes, record): record["deduplicationstatus"] = dedupe["status"] record["filename"] = dedupe["filename"] record["trigger"] = dedupe["trigger"] + record["message"] = dedupe["message"] return record def __updateproperties_old(self, properties, records, record): @@ -417,7 +418,6 @@ def updatedocumentattributes(self, payload, userid): if 'rotatedpages' not in newdocattributes: newdocattributes['rotatedpages'] = {} newdocattributes['rotatedpages'].update(payload["rotatedpages"]) - newdocattributes["divisions"] = payload["divisions"] newRows.append( DocumentAttributes( version=docattributes["version"] + 1, diff --git a/api/reviewer_api/services/jobrecordservice.py b/api/reviewer_api/services/jobrecordservice.py index 587c94bc4..09a74039e 100644 --- a/api/reviewer_api/services/jobrecordservice.py +++ b/api/reviewer_api/services/jobrecordservice.py @@ -6,6 +6,7 @@ from reviewer_api.models.DocumentAttributes import DocumentAttributes from reviewer_api.services.annotationservice import annotationservice from reviewer_api.services.documentpageflagservice import documentpageflagservice +from reviewer_api.models.PDFStitchJobAttributes import PDFStitchJobAttributes from reviewer_api.auth import auth, AuthHelper from datetime import datetime as datetime2 from reviewer_api.utils.constants import FILE_CONVERSION_FILE_TYPES, DEDUPE_FILE_TYPES @@ -129,3 +130,19 @@ def insertpagecalculatorjobstatus(self, message, userid): ) job = PageCalculatorJob.insert(row) return job + + def insertfeeoverridereason(self, message, pdfstitchjobid, userid): + row = PDFStitchJobAttributes( + pdfstitchjobid=pdfstitchjobid, + version=1, + ministryrequestid=message['ministryrequestid'], + attributes=message['pdfstitchjobattributes'], + createdby=userid + ) + job = PDFStitchJobAttributes.insert(row) + return job + + def isbalancefeeoverrodforrequest(self, requestid): + pdfstitchjobattributes= PDFStitchJobAttributes().getpdfstitchjobattributesbyid(requestid) + isbalancefeeoverrode= False if pdfstitchjobattributes is None or not pdfstitchjobattributes else True + return isbalancefeeoverrode diff --git a/api/reviewer_api/services/radactionservice.py b/api/reviewer_api/services/radactionservice.py index f12b53969..cb064ad48 100644 --- a/api/reviewer_api/services/radactionservice.py +++ b/api/reviewer_api/services/radactionservice.py @@ -124,6 +124,11 @@ def triggerdownloadredlinefinalpackage(self, finalpackageschema, userinfo): _jobmessage, userinfo["userid"] ) if job.success: + if 'pdfstitchjobattributes' in finalpackageschema and finalpackageschema['pdfstitchjobattributes'] is not None: + if 'feeoverridereason' in finalpackageschema['pdfstitchjobattributes']: + feeoverridereason= finalpackageschema['pdfstitchjobattributes']['feeoverridereason'] + if feeoverridereason is not None and feeoverridereason != '': + jobrecordservice().insertfeeoverridereason(finalpackageschema,job.identifier,userinfo["userid"]) _message = self.__preparemessageforsummaryservice( finalpackageschema, userinfo, job ) @@ -131,6 +136,14 @@ def triggerdownloadredlinefinalpackage(self, finalpackageschema, userinfo): # redline/final package download: prepare message for zipping 
service def __preparemessageforsummaryservice(self, messageschema, userinfo, job): + feeoverridereason= '' + pdf_stitch_job_attributes = None + if 'pdfstitchjobattributes' in messageschema: + pdf_stitch_job_attributes = to_json(messageschema['pdfstitchjobattributes']) + if pdf_stitch_job_attributes is not None: + feeoverridereason= json.loads(pdf_stitch_job_attributes).get("feeoverridereason", None) + if feeoverridereason is not None and feeoverridereason != '': + feeoverridereason= userinfo["firstname"]+" "+userinfo["lastname"]+" overrode the outstanding balance warning for the following reason: "+feeoverridereason _message = { "jobid": job.identifier, "requestid": -1, @@ -145,7 +158,9 @@ def __preparemessageforsummaryservice(self, messageschema, userinfo, job): "finaloutput": to_json(""), "attributes": to_json(messageschema["attributes"]), "summarydocuments": json.dumps(messageschema["summarydocuments"]), - "redactionlayerid": json.dumps(messageschema["redactionlayerid"]) + "redactionlayerid": json.dumps(messageschema["redactionlayerid"]), + "feeoverridereason":feeoverridereason, + "requesttype": messageschema["requesttype"] } return _message diff --git a/api/reviewer_api/utils/enums.py b/api/reviewer_api/utils/enums.py index fec94ac70..00f5682b1 100644 --- a/api/reviewer_api/utils/enums.py +++ b/api/reviewer_api/utils/enums.py @@ -35,6 +35,7 @@ class MinistryTeamWithKeycloackGroup(Enum): AGR = "AGR Ministry Team" AG = "AG Ministry Team" BRD = "BRD Ministry Team" + CAF = "CAF Ministry Team" CAS = "CAS Ministry Team" CITZ = "CITZ Ministry Team" CLB = "CLB Ministry Team" @@ -73,6 +74,7 @@ class MinistryTeamWithKeycloackGroup(Enum): ECC = "ECC Ministry Team" JED = "JED Ministry Team" COR = "COR Ministry Team" HSG = "HSG Ministry Team" + LSB = "LSB Ministry Team" @staticmethod def list(): diff --git a/api/reviewer_api/utils/util.py b/api/reviewer_api/utils/util.py index 68414ac3d..796561c9f 100644 --- a/api/reviewer_api/utils/util.py +++ b/api/reviewer_api/utils/util.py @@ -137,6 +137,8 @@ def getbatchconfig(): return _begin, _size, _limit def is_single_redline_package(bcgovcode, packagetype, requesttype): + if packagetype == "consult": + return False if (packagetype == "oipcreview"): return True if REDLINE_SINGLE_PKG_MINISTRIES not in (None, ""): @@ -145,6 +147,6 @@ def is_single_redline_package(bcgovcode, packagetype, requesttype): _pkg_ministries_personal = REDLINE_SINGLE_PKG_MINISTRIES_PERSONAL.replace(" ", "").split(',') - if bcgovcode.upper() in _pkg_ministries_personal: + if bcgovcode.upper() in _pkg_ministries_personal and requesttype.upper() == "PERSONAL": return True return False \ No newline at end of file diff --git a/computingservices/DedupeServices/requirements.txt b/computingservices/DedupeServices/requirements.txt index ca7e1b33c..e51e508cd 100644 Binary files a/computingservices/DedupeServices/requirements.txt and b/computingservices/DedupeServices/requirements.txt differ diff --git a/computingservices/DedupeServices/services/dedupeservice.py b/computingservices/DedupeServices/services/dedupeservice.py index 0799beea4..a4af991b6 100644 --- a/computingservices/DedupeServices/services/dedupeservice.py +++ b/computingservices/DedupeServices/services/dedupeservice.py @@ -21,4 +21,4 @@ def processmessage(message): documentspagecalculatorproducerservice().producepagecalculatorevent(pagecalculatormessage, _pagecount, pagecalculatorjobid) 
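# The handler below now records the plain error message (error.args[0]) against the job instead of the full traceback, so the dedupe status surfaces a readable failure reason.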
except(Exception) as error: print("Exception while processing redis message, func processmessage(p3), Error : {0} ".format(error)) - recordjobend(message, True, traceback.format_exc()) \ No newline at end of file + recordjobend(message, True, error.args[0]) \ No newline at end of file diff --git a/computingservices/DedupeServices/services/s3documentservice.py b/computingservices/DedupeServices/services/s3documentservice.py index 6f4d46047..2129117a4 100644 --- a/computingservices/DedupeServices/services/s3documentservice.py +++ b/computingservices/DedupeServices/services/s3documentservice.py @@ -12,7 +12,11 @@ from html import escape import hashlib import uuid +import boto3 +from botocore.config import Config from re import sub +import fitz +import PyPDF2 from utils import ( gets3credentialsobject, getdedupeproducermessage, @@ -49,6 +53,49 @@ def __getcredentialsbybcgovcode(bcgovcode): return s3cred +def _prepareattachment(producermessage, data, s3uripath, file_name): + attachment = { + "filename": escape(sub("<[0-9]+>", "", file_name, 1)), + "s3uripath": s3uripath, + "attributes": deepcopy(producermessage.attributes), + } + attachment["attributes"]["filesize"] = len(data) + attachment["attributes"][ + "parentpdfmasterid" + ] = producermessage.documentmasterid + attachment["attributes"].pop("batch") + attachment["attributes"].pop("extension") + attachment["attributes"].pop("incompatible") + return attachment + +def _generate_file_attachments(producermessage, reader, auth): + file_attachments = [] + for page in reader.pages: + if "/Annots" in page: + annotations = page["/Annots"] + for annotation in annotations: + subtype = annotation.get_object()["/Subtype"] + if subtype == "/FileAttachment": + # Placeholder logic to handle pdf attachments+embeds. Once resources are available to revise this feature and extract attachments + embeds into one new parent PDF, this error handling will be removed. + raise Exception("PDF contains attachments and/or embedded files. File must be manually fixed and replaced") + + # Old logic to extract embedded files. Uncomment when the new feature to save pdf embeds + attachments as one file is started. + # producermessage.attributes["hasattachment"] = True + # fileobj = annotation.get_object()["/FS"] + # file = fileobj["/F"] + # data = fileobj["/EF"]["/F"].get_data() + # # data = BytesIO(data).getvalue() + # s3uripath = ( + # path.splitext(producermessage.s3filepath)[0] + # + "/" + # + "{0}{1}".format(uuid.uuid4(), path.splitext(file)[1]) + # ) + # uploadresponse = requests.put(s3uripath, data=data, auth=auth) + # uploadresponse.raise_for_status() + # attachment = _prepareattachment(producermessage, data, s3uripath, file) + # file_attachments.append(attachment) + return file_attachments + def gets3documenthashcode(producermessage): s3credentials = __getcredentialsbybcgovcode(producermessage.bcgovcode) s3_access_key_id = s3credentials.s3accesskey @@ -85,7 +132,11 @@ def gets3documenthashcode(producermessage): if "/Collection" in reader.trailer["/Root"]: producermessage.attributes["isportfolio"] = True else: - producermessage.attributes["hasattachment"] = True + # Placeholder logic to handle pdf attachments+embeds. Once resources are available to revise this feature and extract attachments + embeds into one new parent PDF, this error handling will be removed. + raise Exception("PDF contains attachments and/or embedded files. File must be manually fixed and replaced") + + # Old logic to extract attached files. 
Uncomment when the new feature to save pdf embeds + attachments as one file is started. + # producermessage.attributes["hasattachment"] = True for name in reader.attachments: s3uripath = ( path.splitext(filepath)[0] + "/" + "{0}{1}".format(uuid.uuid4(), path.splitext(name)[1]) ) data = b"".join(reader.attachments[name]) uploadresponse = requests.put(s3uripath, data=data, auth=auth) uploadresponse.raise_for_status() - attachment = { - "filename": escape(sub("<[0-9]+>", "", name, 1)), - "s3uripath": s3uripath, - "attributes": deepcopy(producermessage.attributes), - } - attachment["attributes"]["filesize"] = len(data) - attachment["attributes"][ - "parentpdfmasterid" - ] = producermessage.documentmasterid - attachment["attributes"].pop("batch") - attachment["attributes"].pop("extension") - attachment["attributes"].pop("incompatible") + attachment = _prepareattachment(producermessage, data, s3uripath, name) attachments.append(attachment) saveresponse = requests.post( request_management_api + "/api/foirecord/-1/ministryrequest/" + producermessage.ministryrequestid, data=json.dumps({"records": attachments}), headers={ "Authorization": producermessage.usertoken, "Content-Type": "application/json", }, ) saveresponse.raise_for_status() + + # New logic to extract embedded file attachments (classified under annotations in the PDF) from pages in PDF + # Before looping over the pdf pages, confirm that annotations exist in the pdf using the pyMuPdf library (fitz) + fitz_reader = fitz.open(stream=BytesIO(response.content), filetype="pdf") + if (fitz_reader.has_annots()): + file_attachments = _generate_file_attachments(producermessage, reader, auth) + if (len(file_attachments) > 0): + saveresponse = requests.post( + request_management_api + + "/api/foirecord/-1/ministryrequest/" + + producermessage.ministryrequestid, + data=json.dumps({"records": file_attachments}), + headers={ + "Authorization": producermessage.usertoken, + "Content-Type": "application/json", + } + ) + saveresponse.raise_for_status() + fitz_reader.close() + + # clear metadata + reader2 = PyPDF2.PdfReader(BytesIO(response.content)) + # Check if metadata exists. + if reader2.metadata is not None: + # Create a new PDF file without metadata. + writer = PyPDF2.PdfWriter() + # Copy pages from the original PDF to the new PDF. + for page_num in range(len(reader.pages)): + page = reader2.pages[page_num] + writer.add_page(page) + #writer.remove_links() # to remove comments. 
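+ # Serialize the metadata-stripped copy to an in-memory buffer below; the untouched source object is first preserved in S3 as an ORIGINAL.pdf copy before the cleaned bytes overwrite it.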
+ buffer = BytesIO() + writer.write(buffer) + client = boto3.client('s3',config=Config(signature_version='s3v4'), + endpoint_url='https://{0}/'.format(dedupe_s3_host), + aws_access_key_id= s3_access_key_id, + aws_secret_access_key= s3_secret_access_key, + region_name= dedupe_s3_region + ) + copyresponse = client.copy_object( + CopySource="/" + "/".join(filepath.split("/")[3:]), # /Bucket-name/path/filename + Bucket=filepath.split("/")[3], # Destination bucket + Key= "/".join(filepath.split("/")[4:])[:-4] + 'ORIGINAL' + '.pdf' # Destination path/filename + ) + uploadresponse = requests.put( + filepath, + data=buffer.getvalue(), + auth=auth + ) + uploadresponse.raise_for_status() + elif extension.lower() in file_conversion_types: # "Extension different {0}, so need to download pdf here for pagecount!!".format(extension)) pdfresponseofconverted = requests.get( diff --git a/computingservices/DedupeServices/utils/foidedupeconfig.py b/computingservices/DedupeServices/utils/foidedupeconfig.py index 6e648af28..a51710bc2 100644 --- a/computingservices/DedupeServices/utils/foidedupeconfig.py +++ b/computingservices/DedupeServices/utils/foidedupeconfig.py @@ -18,7 +18,6 @@ dedupe_db_user = os.getenv("DEDUPE_DB_USER") dedupe_db_password = os.getenv("DEDUPE_DB_PASSWORD") -dedupe_s3_host = os.getenv("DEDUPE_S3_HOST") dedupe_s3_host = os.getenv("DEDUPE_S3_HOST") dedupe_s3_region = os.getenv("DEDUPE_S3_REGION") dedupe_s3_service = os.getenv("DEDUPE_S3_SERVICE") diff --git a/computingservices/DocumentServices/rstreamio/message/schemas/redactionsummary.py b/computingservices/DocumentServices/rstreamio/message/schemas/redactionsummary.py index 979e4d18f..b3a6d67f5 100644 --- a/computingservices/DocumentServices/rstreamio/message/schemas/redactionsummary.py +++ b/computingservices/DocumentServices/rstreamio/message/schemas/redactionsummary.py @@ -30,7 +30,7 @@ def __init__(self, sorteddocuments, pkgdocuments) -> None: class RedactionSummaryMessage(object): def __init__(self, jobid, requestid, ministryrequestid, category, requestnumber, - bcgovcode, createdby, filestozip, finaloutput, attributes, summarydocuments ,redactionlayerid) -> None: + bcgovcode, createdby, filestozip, finaloutput, attributes, summarydocuments ,redactionlayerid, requesttype, feeoverridereason) -> None: self.jobid = jobid self.requestid = requestid self.ministryrequestid = ministryrequestid @@ -43,6 +43,8 @@ def __init__(self, jobid, requestid, ministryrequestid, category, requestnumber, self.attributes = attributes self.summarydocuments = summarydocuments self.redactionlayerid = redactionlayerid + self.feeoverridereason = feeoverridereason + self.requesttype = requesttype def get_in_redactionsummary_msg(producer_json): diff --git a/computingservices/DocumentServices/services/dts/redactionsummary.py b/computingservices/DocumentServices/services/dts/redactionsummary.py index fcff74a52..bb08b2a84 100644 --- a/computingservices/DocumentServices/services/dts/redactionsummary.py +++ b/computingservices/DocumentServices/services/dts/redactionsummary.py @@ -2,15 +2,17 @@ from rstreamio.message.schemas.redactionsummary import get_in_summary_object,get_in_summarypackage_object import json from collections import defaultdict +import traceback class redactionsummary(): def prepareredactionsummary(self, message, documentids, pageflags, programareas): - if message.bcgovcode == 'mcf': + _ismcfpersonalrequest = True if message.bcgovcode == 'mcf' and message.requesttype == 'personal' else False + if 
_ismcfpersonalrequest and message.category == "responsepackage": redactionsummary = self.__packagesummaryforcfdrequests(message, documentids) else: redactionsummary = self.__packaggesummary(message, documentids, pageflags, programareas) - if message.category == "responsepackage" and message.bcgovcode != 'mcf': + if message.category == "responsepackage" and _ismcfpersonalrequest == False: consolidated_redactions = [] for entry in redactionsummary['data']: consolidated_redactions += entry['sections'] @@ -32,7 +34,8 @@ def __packaggesummary(self, message, documentids, pageflags, programareas): ordereddocids = summaryobject.sorteddocuments stitchedpagedata = documentpageflag().getpagecount_by_documentid(message.ministryrequestid, ordereddocids) totalpagecount = self.__calculate_totalpages(stitchedpagedata) - print("\ntotalpagecount",totalpagecount) + print("\n __packaggesummary stitchedpagedata",stitchedpagedata) + print("\n __packaggesummary totalpagecount",totalpagecount) if totalpagecount <=0: return @@ -40,33 +43,49 @@ def __packaggesummary(self, message, documentids, pageflags, programareas): print("\n_pageflags",_pageflags) summarydata = [] docpageflags = documentpageflag().get_documentpageflag(message.ministryrequestid, redactionlayerid, ordereddocids) + print("\n docpageflags",docpageflags) deletedpages = self.__getdeletedpages(message.ministryrequestid, ordereddocids) skippages= [] pagecount = 0 - for docid in ordereddocids: - if docid in documentids: - docdeletedpages = deletedpages[docid] if docid in deletedpages else [] - docpageflag = docpageflags[docid] - for pageflag in _pageflags: - filteredpages = self.__get_pages_by_flagid(docpageflag["pageflag"], docdeletedpages, pagecount, pageflag["pageflagid"], message.category) - if len(filteredpages) > 0: - originalpagenos = [pg['originalpageno'] for pg in filteredpages] - docpagesections = documentpageflag().getsections_by_documentid_pageno(redactionlayerid, docid, originalpagenos) - docpageconsults = self.__get_consults_by_pageno(programareas, docpageflag["pageflag"], filteredpages) - pageflag['docpageflags'] = pageflag['docpageflags'] + self.__get_pagesection_mapping(filteredpages, docpagesections, docpageconsults) - skippages = self.__get_skippagenos(docpageflag['pageflag'], message.category) - pagecount = (pagecount+stitchedpagedata[docid]["pagecount"])-len(skippages) - print("\n_pageflags1",_pageflags) - for pageflag in _pageflags: - _data = {} - if len(pageflag['docpageflags']) > 0: + try: + for docid in ordereddocids: + if docid in documentids: + docdeletedpages = deletedpages[docid] if docid in deletedpages else [] + if docpageflags is not None and docid in docpageflags.keys(): + docpageflag = docpageflags[docid] + for pageflag in _pageflags: + filteredpages = self.__get_pages_by_flagid(docpageflag["pageflag"], docdeletedpages, pagecount, pageflag["pageflagid"], message.category) + if len(filteredpages) > 0: + originalpagenos = [pg['originalpageno'] for pg in filteredpages] + docpagesections = documentpageflag().getsections_by_documentid_pageno(redactionlayerid, docid, originalpagenos) + docpageconsults = self.__get_consults_by_pageno(programareas, docpageflag["pageflag"], filteredpages) + pageflag['docpageflags'] = pageflag['docpageflags'] + self.__get_pagesection_mapping(filteredpages, docpagesections, docpageconsults) + skippages = self.__get_skippagenos(docpageflag['pageflag'], message.category) + if stitchedpagedata is not None: + pagecount = (pagecount+stitchedpagedata[docid]["pagecount"])-len(skippages) + 
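+ # At this point _pageflags has accumulated, per flag, the stitched-page entries for every ordered document.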
print("\n_pageflags1",_pageflags) + for pageflag in _pageflags: _data = {} - _data["flagname"] = pageflag["header"].upper() - _data["pagecount"] = len(pageflag['docpageflags']) - _data["sections"] = self.__format_redaction_summary(pageflag["description"], pageflag['docpageflags'], message.category) - summarydata.append(_data) + if len(pageflag['docpageflags']) > 0: + _data = {} + _data["flagname"] = pageflag["header"].upper() + _data["pagecount"] = len(pageflag['docpageflags']) + _data["sections"] = self.__format_redaction_summary(pageflag["description"], pageflag['docpageflags'], message.category) + summarydata.append(_data) + #remove duplicate and NR for oipc review redline + def removeduplicateandnr(pageflag): + if pageflag['flagname'].lower() != 'duplicate' and pageflag['flagname'].lower() != 'not responsive': + return True + return False + if message.category == "oipcreviewredline": + print("\n removing duplicate and not responsive pages from summary") + summarydata = list(filter(removeduplicateandnr, summarydata)) + except (Exception) as err: + traceback.print_exc() + print('error occured in __packaggesummary redaction dts service: ', err) return {"requestnumber": message.requestnumber, "data": summarydata} except (Exception) as error: + traceback.print_exc() print('error occured in redaction dts service: ', error) @@ -89,34 +108,35 @@ def __packagesummaryforcfdrequests(self, message, documentids): docpageflags = documentpageflag().get_documentpageflag(message.ministryrequestid, redactionlayerid, ordereddocids) sorted_docpageflags = {k: docpageflags[k] for k in ordereddocids} - print("============>sorted_docpageflags:", sorted_docpageflags) + # print("============>sorted_docpageflags:", sorted_docpageflags) deletedpages = self.__getdeletedpages(message.ministryrequestid, ordereddocids) #print("============>deletedpages:", deletedpages) mapped_flags = self.process_page_flags(sorted_docpageflags,deletedpages) #print("###mapped_flags1:",mapped_flags) pagecounts= self.count_pages_per_doc(mapped_flags) - print("pagecounts:",pagecounts) + # print("pagecounts:",pagecounts) #document_pages = self.__get_document_pages(docpageflags) #original_pages = self.__adjust_original_pages(document_pages) end_page = 0 for record in records: - print("-----------------------Record : ---------------------------", record["documentids"]) - record_range, totalpagecount1,end_page = self.__createrecordpagerange(record, pagecounts,end_page ) - print(f"Range for each record- record_range:{record_range} &&& totalpagecount1:{totalpagecount1} \ - &&& end_page-{end_page}") - self.assignfullpagesections(redactionlayerid, mapped_flags) - print("\nMapped_flags::",mapped_flags) - range_result = self.__calculate_range(mapped_flags, record["documentids"]) - print("range_result:",range_result) - recordwise_pagecount = next((record["pagecount"] for record in record_range if record["recordname"] == record['recordname'].upper()), 0) - print(f"{record['recordname']} :{recordwise_pagecount}") - summarydata.append(self.__create_summary_data(record, range_result, mapped_flags, recordwise_pagecount)) - - print("\n summarydata:",summarydata) + if record["documentids"][0] in pagecounts: + # print("-----------------------Record : ---------------------------", record["documentids"]) + record_range, totalpagecount1,end_page = self.__createrecordpagerange(record, pagecounts,end_page ) + # print(f"Range for each record- record_range:{record_range} &&& totalpagecount1:{totalpagecount1} \ + # &&& end_page-{end_page}") + 
self.assignfullpagesections(redactionlayerid, mapped_flags) + # print("\nMapped_flags::",mapped_flags) + range_result = self.__calculate_range(mapped_flags, record["documentids"]) + # print("range_result:",range_result) + recordwise_pagecount = next((record["pagecount"] for record in record_range if record["recordname"] == record['recordname'].upper()), 0) + # print(f"{record['recordname']} :{recordwise_pagecount}") + summarydata.append(self.__create_summary_data(record, range_result, mapped_flags, recordwise_pagecount)) + + # print("\n summarydata:",summarydata) return {"requestnumber": message.requestnumber, "data": summarydata} except Exception as error: - print('Error occurred in redaction dts service: ', error) + print('CFD Error occurred in redaction dts service: ', error) def __calculate_range(self, mapped_flags, docids): @@ -132,17 +152,17 @@ def __calculate_range(self, mapped_flags, docids): grouped_flags= self.__groupbysections(filtered_mapper) ranges = self.__create_ranges(grouped_flags) - print("\n ranges:",ranges) - return {"range": f"{min_stitched_page}-{max_stitched_page}" if min_stitched_page != max_stitched_page else f"{min_stitched_page}", "flagged_range":ranges} + # print("\n ranges:",ranges) + return {"range": f"{min_stitched_page} - {max_stitched_page}" if min_stitched_page != max_stitched_page else f"{min_stitched_page}", "flagged_range":ranges} def assignfullpagesections(self, redactionlayerid, mapped_flags): document_pages= self.get_sorted_original_pages_by_docid(mapped_flags) - print("document_pages:",document_pages) + # print("document_pages:",document_pages) for item in document_pages: for doc_id, pages in item.items(): docpagesections = documentpageflag().getsections_by_documentid_pageno(redactionlayerid, doc_id, pages) - print(f"\n doc_id-{doc_id}, docpagesections-{docpagesections}") + # print(f"\n doc_id-{doc_id}, docpagesections-{docpagesections}") for flag in mapped_flags: if flag['docid'] == doc_id and flag['flagid'] == 3: flag['sections']= self.__get_sections_mcf1(docpagesections, flag['dbpageno']) @@ -151,7 +171,7 @@ def assignfullpagesections(self, redactionlayerid, mapped_flags): def __get_sections_mcf1(self, docpagesections, pageno): sections = [] filtered = [x for x in docpagesections if x['pageno'] == pageno] - print(f"\n pageno-{pageno}, filtered-{filtered}") + # print(f"\n pageno-{pageno}, filtered-{filtered}") if filtered: for dta in filtered: sections += [x.strip() for x in dta['section'].split(",")] @@ -185,12 +205,12 @@ def __createrecordpagerange(self, record, pagecounts, previous_end_page=0): totalpagecount1 += pagecounts[doc_id] if totalpagecount1 == 0: - return [], previous_end_page + return [], totalpagecount1, previous_end_page start_page = previous_end_page + 1 end_page = previous_end_page + totalpagecount1 - range_string = f"{start_page}-{end_page}" if totalpagecount1 > 1 else f"{start_page}" + range_string = f"{start_page} - {end_page}" if totalpagecount1 > 1 else f"{start_page}" result = { "recordname": record['recordname'].upper(), "range": range_string, @@ -258,7 +278,7 @@ def process_page_flags(self,docpageflags, deletedpages): def __groupbysections(self, filtered_mapper): - print("\n __groupbysections: ", filtered_mapper) + # print("\n __groupbysections: ", filtered_mapper) # Group by sections grouped_flags = defaultdict(list) for flag in filtered_mapper: @@ -266,7 +286,7 @@ def __groupbysections(self, filtered_mapper): sections_key = tuple(flag['sections']) if 'sections' in flag and flag['sections'] else ('No Section',) 
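# Flags with no recorded sections are bucketed under ('No Section',) so they still appear in the grouped ranges.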
grouped_flags[sections_key].append(flag) grouped_flags = dict(grouped_flags) - print("\n grouped_flags:", grouped_flags) + # print("\n grouped_flags:", grouped_flags) return grouped_flags @@ -286,14 +306,14 @@ def __create_ranges(self, grouped_flags): if start == prev: range_list.append(f"{start}") else: - range_list.append(f"{start}-{prev}") + range_list.append(f"{start} - {prev}") start = page prev = page # Add the last range if start == prev: range_list.append(f"{start}") else: - range_list.append(f"{start}-{prev}") + range_list.append(f"{start} - {prev}") # Save the range list for the current sections_key ranges[sections_key] = range_list return ranges @@ -323,7 +343,7 @@ def generate_text(self, range_result): # Format the section information formatted_sections = f"{pageflag} under {sections_str}" if sections_str else "" # Append the formatted text to the section list - section_list.append({"formatted" :f"{range_item} were {formatted_sections}" if formatted_sections else range_item}) + section_list.append({"formatted" :f"pg(s). {range_item} {formatted_sections}" if formatted_sections else range_item}) return section_list @@ -419,10 +439,10 @@ def __get_pagesection_mapping(self, docpages, docpagesections, docpageconsults): def __get_sections(self, docpagesections, pageno): - print(f"\n pageno-{pageno}, docpagesections-{docpagesections}") + # print(f"\n pageno-{pageno}, docpagesections-{docpagesections}") sections = [] filtered = [x for x in docpagesections if x['pageno'] == pageno] - print("\n filtered:",filtered) + # print("\n filtered:",filtered) for dta in filtered: sections += [x.strip() for x in dta['section'].split(",")] return list(filter(None, sections)) @@ -439,7 +459,7 @@ def __get_pages_by_flagid(self, _docpageflags, deletedpages, totalpages, flagid, def __get_skippagenos(self, _docpageflags, category): skippages = [] - if category in ['responsepackage', 'CFD_responsepackage']: + if category in ['responsepackage', 'CFD_responsepackage', 'oipcreviewredline']: for x in _docpageflags: if x['flagid'] in (5,6) and x['page'] not in skippages: skippages.append(x['page']) @@ -447,7 +467,7 @@ def __get_skippagenos(self, _docpageflags, category): def __calcstitchedpageno(self, pageno, totalpages, category, skippages, deletedpages): skipcount = 0 - if category in ["responsepackage", 'CFD_responsepackage']: + if category in ["responsepackage", 'CFD_responsepackage', 'oipcreviewredline']: skipcount = self.__calculateskipcount(pageno, skippages) skipcount = self.__calculateskipcount(pageno, deletedpages, skipcount) return (pageno+totalpages)-skipcount diff --git a/computingservices/DocumentServices/services/redactionsummaryservice.py b/computingservices/DocumentServices/services/redactionsummaryservice.py index 2984c52b1..adf5ba6d7 100644 --- a/computingservices/DocumentServices/services/redactionsummaryservice.py +++ b/computingservices/DocumentServices/services/redactionsummaryservice.py @@ -13,32 +13,37 @@ class redactionsummaryservice(): def processmessage(self,incomingmessage): summaryfilestozip = [] message = get_in_redactionsummary_msg(incomingmessage) + print('\n 1. 
get_in_redactionsummary_msg is : {0}'.format(message))
        try:
+            category = message.category
+            # Condition to handle consult packages (no summary files to be created)
+            if category == "consultpackage":
+                return summaryfilestozip
            pdfstitchjobactivity().recordjobstatus(message,3,"redactionsummarystarted")
            summarymsg = message.summarydocuments
            #Condition for handling oipcredline category
            bcgovcode= message.bcgovcode
-            category = message.category
-            if bcgovcode == 'mcf':
+            requesttype = message.requesttype
+            if bcgovcode == 'mcf' and requesttype == 'personal' and category == 'responsepackage':
                documenttypename= 'CFD_responsepackage_redaction_summary'
            else:
                documenttypename= category+"_redaction_summary" if category == 'responsepackage' else "redline_redaction_summary"
-            #print('documenttypename', documenttypename)
+            print('\n 2. documenttypename', documenttypename)
            upload_responses=[]
            pageflags = self.__get_pageflags(category)
            programareas = documentpageflag().get_all_programareas()
            messageattributes= json.loads(message.attributes)
-            #print("\nmessageattributes:",messageattributes)
+            print("\n 3. messageattributes:",messageattributes)
            divisiondocuments = get_in_summary_object(summarymsg).pkgdocuments
-            #print("\n divisiondocuments:",divisiondocuments)
+            print("\n 4. divisiondocuments:",divisiondocuments)
            for entry in divisiondocuments:
                #print("\n entry:",entry)
                if 'documentids' in entry and len(entry['documentids']) > 0 :
-                    # print("\n entry['divisionid']:",entry['divisionid'])
+                    print("\n 5. entry['divisionid']:",entry['divisionid'])
                    divisionid = entry['divisionid']
                    documentids = entry['documentids']
                    formattedsummary = redactionsummary().prepareredactionsummary(message, documentids, pageflags, programareas)
-                    #print("formattedsummary", formattedsummary)
+                    print("\n 6.
formattedsummary", formattedsummary) template_path='templates/'+documenttypename+'.docx' redaction_summary= documentgenerationservice().generate_pdf(formattedsummary, documenttypename,template_path) divisioname = None @@ -58,7 +63,7 @@ def processmessage(self,incomingmessage): s3uricategoryfolder = category s3uri = stitcheddocs3uri.split(s3uricategoryfolder+"/")[0] + s3uricategoryfolder+"/" filename =self.__get_summaryfilename(message.requestnumber, category, divisioname, stitcheddocfilename) - print("\n filename:",filename) + print("\n redaction_summary.content length: {0}".format(len(redaction_summary.content))) uploadobj= uploadbytes(filename,redaction_summary.content,s3uri) upload_responses.append(uploadobj) if uploadobj["uploadresponse"].status_code == 200: @@ -68,10 +73,11 @@ def processmessage(self,incomingmessage): summaryuploaderror= True summaryuploaderrormsg = uploadobj.uploadresponse.text pdfstitchjobactivity().recordjobstatus(message,4,"redactionsummaryuploaded",summaryuploaderror,summaryuploaderrormsg) - print("\ns3uripath:",uploadobj["documentpath"]) + # print("\ns3uripath:",uploadobj["documentpath"]) summaryfilestozip.append({"filename": uploadobj["filename"], "s3uripath":uploadobj["documentpath"]}) return summaryfilestozip except (Exception) as error: + traceback.print_exc() print('error occured in redaction summary service: ', error) pdfstitchjobactivity().recordjobstatus(message,4,"redactionsummaryfailed",str(error),"summary generation failed") return summaryfilestozip @@ -86,7 +92,7 @@ def __get_summaryfilename(self, requestnumber, category, divisionname, stitchedd _filename = requestnumber+" - "+category if divisionname not in (None, ''): _filename = _filename+" - "+divisionname - print("---->",stitchedfilepath+_filename+" - summary.pdf") + # print("---->",stitchedfilepath+_filename+" - summary.pdf") return stitchedfilepath+_filename+" - summary.pdf" def __get_pageflags(self, category): diff --git a/computingservices/DocumentServices/services/zippingservice.py b/computingservices/DocumentServices/services/zippingservice.py index c997af363..06644da8c 100644 --- a/computingservices/DocumentServices/services/zippingservice.py +++ b/computingservices/DocumentServices/services/zippingservice.py @@ -11,14 +11,16 @@ def sendtozipper(self, summaryfiles, message): def preparemessageforzipperservice(self,summaryfiles, message): try: msgjson= json.loads(message) + msgjson.pop('requesttype', None) if summaryfiles and len(summaryfiles) > 0: filestozip_list = json.loads(msgjson['filestozip'])+summaryfiles else: - filestozip_list = msgjson['filestozip'] + filestozip_list = json.loads(msgjson['filestozip']) print('filestozip_list: ', filestozip_list) msgjson['filestozip'] = self.to_json(filestozip_list) msgjson['attributes'] = self.to_json(msgjson['attributes']) - msgjson['summarydocuments'] = self.to_json(msgjson['summarydocuments']) + msgjson['summarydocuments'] = self.to_json(msgjson['summarydocuments']) + return msgjson except (Exception) as error: print('error occured in zipping service: ', error) diff --git a/computingservices/DocumentServices/templates/CFD_responsepackage_redaction_summary.docx b/computingservices/DocumentServices/templates/CFD_responsepackage_redaction_summary.docx index feac3cba3..fa9a6034a 100644 Binary files a/computingservices/DocumentServices/templates/CFD_responsepackage_redaction_summary.docx and b/computingservices/DocumentServices/templates/CFD_responsepackage_redaction_summary.docx differ diff --git 
a/computingservices/DocumentServices/templates/redline_redaction_summary.docx b/computingservices/DocumentServices/templates/redline_redaction_summary.docx index e08da9f00..35b681462 100644 Binary files a/computingservices/DocumentServices/templates/redline_redaction_summary.docx and b/computingservices/DocumentServices/templates/redline_redaction_summary.docx differ diff --git a/computingservices/DocumentServices/templates/responsepackage_redaction_summary.docx b/computingservices/DocumentServices/templates/responsepackage_redaction_summary.docx index 86a3730ab..e81ba1b3f 100644 Binary files a/computingservices/DocumentServices/templates/responsepackage_redaction_summary.docx and b/computingservices/DocumentServices/templates/responsepackage_redaction_summary.docx differ diff --git a/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py b/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py index a358236b4..8d415ce29 100644 --- a/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py +++ b/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py @@ -11,7 +11,7 @@ def getlatestrecordspagecount(cls, ministryrequestid): cursor = conn.cursor() query = ''' SELECT recordspagecount - FROM public."FOIMinistryRequests" + FROM "FOIMinistryRequests" WHERE foiministryrequestid = %s::integer AND isactive = true ORDER BY version DESC LIMIT 1; ''' @@ -33,7 +33,7 @@ def updaterecordspagecount(cls, ministryrequestid, pagecount, userid): try: cursor = conn.cursor() query = ''' - UPDATE public."FOIMinistryRequests" SET recordspagecount = %s::integer, updated_at = %s, updatedby = %s + UPDATE "FOIMinistryRequests" SET recordspagecount = %s::integer, updated_at = %s, updatedby = %s WHERE foiministryrequestid = %s::integer AND isactive = true; ''' parameters = (pagecount, datetime.now().isoformat(), userid, ministryrequestid,) diff --git a/computingservices/ZippingServices/models/redlineresponsenotificationmessage.py b/computingservices/ZippingServices/models/redlineresponsenotificationmessage.py index 5028e8783..cc562164f 100644 --- a/computingservices/ZippingServices/models/redlineresponsenotificationmessage.py +++ b/computingservices/ZippingServices/models/redlineresponsenotificationmessage.py @@ -1,6 +1,7 @@ class redlineresponsenotificationmessage(object): - def __init__(self, ministryrequestid, serviceid, errorflag, createdby) -> None: + def __init__(self, ministryrequestid, serviceid, errorflag, createdby,feeoverridereason="") -> None: self.ministryrequestid = ministryrequestid self.serviceid = serviceid self.errorflag = errorflag self.createdby = createdby + self.feeoverridereason=feeoverridereason diff --git a/computingservices/ZippingServices/models/zipperproducermessage.py b/computingservices/ZippingServices/models/zipperproducermessage.py index deee421e1..41b175753 100644 --- a/computingservices/ZippingServices/models/zipperproducermessage.py +++ b/computingservices/ZippingServices/models/zipperproducermessage.py @@ -1,5 +1,5 @@ class zipperproducermessage(object): - def __init__(self,jobid,requestid,category,requestnumber,bcgovcode,createdby,ministryrequestid,filestozip,finaloutput,attributes,summarydocuments=None,redactionlayerid=None,foldername=None) -> None: + def __init__(self,jobid,requestid,category,requestnumber,bcgovcode,createdby,ministryrequestid,filestozip,finaloutput,attributes,feeoverridereason=None,summarydocuments=None,redactionlayerid=None,foldername=None) -> None: self.jobid = jobid 
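        # illustrative only (not part of this changeset): with the new keyword
        # argument, producers can thread the override reason through the zipper
        # queue, e.g.
        #   zipperproducermessage(jobid, requestid, category, requestnumber, bcgovcode,
        #                         createdby, ministryrequestid, filestozip, finaloutput,
        #                         attributes, feeoverridereason="balance waived by IAO")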
self.requestid = requestid
         self.category=category
@@ -13,3 +13,4 @@ def __init__(self,jobid,requestid,category,requestnumber,bcgovcode,createdby,min
     self.foldername = foldername
     self.summarydocuments = summarydocuments
     self.redactionlayerid = redactionlayerid
+    self.feeoverridereason= feeoverridereason
\ No newline at end of file
diff --git a/computingservices/ZippingServices/requirements.txt b/computingservices/ZippingServices/requirements.txt
index 6e9366875..a08f010df 100644
Binary files a/computingservices/ZippingServices/requirements.txt and b/computingservices/ZippingServices/requirements.txt differ
diff --git a/computingservices/ZippingServices/services/notificationservice.py b/computingservices/ZippingServices/services/notificationservice.py
index f237393f9..5fc5f1c5c 100644
--- a/computingservices/ZippingServices/services/notificationservice.py
+++ b/computingservices/ZippingServices/services/notificationservice.py
@@ -44,6 +44,7 @@ def __responsepackagepublishtostream(self, message, error=False):
             serviceid="pdfstitchforresponsepackage",
             createdby=message.createdby,
             errorflag=self.__booltostr(error),
+            feeoverridereason= message.feeoverridereason
         )

         logging.info(
diff --git a/computingservices/ZippingServices/services/zipperservice.py b/computingservices/ZippingServices/services/zipperservice.py
index dddcc827d..312ad7562 100644
--- a/computingservices/ZippingServices/services/zipperservice.py
+++ b/computingservices/ZippingServices/services/zipperservice.py
@@ -14,7 +14,7 @@ from .notificationservice import notificationservice
 import json
 import traceback
-
+import PyPDF2

 def processmessage(message):
     try:
@@ -110,8 +110,22 @@ def __zipfilesandupload(_message, s3credentials):
             for fileobj in _jsonfiles:
                 filename = fileobj["filename"]
                 print("\nfilename:",filename)
+
+                _docbytes = __getdocumentbytearray(fileobj, s3credentials)
+                _formattedbytes = None
+
+                try:
+                    _formattedbytes = __removesensitivecontent(_docbytes)
+                    if _formattedbytes is not None:
+                        print("_formattedbytes length is {0}".format(len(_formattedbytes)))
+                    else:
+                        print("_formattedbytes is none")
+                except Exception:
+                    print("error happened while removing sensitive content of {0} ".format(filename))
+                    print(traceback.format_exc())
                 zip.writestr(
-                    filename, __getdocumentbytearray(fileobj, s3credentials)
+                    filename, _docbytes if _formattedbytes is None else _formattedbytes
                 )

             tp.seek(0)
@@ -143,3 +173,20 @@ def __getzipfilepath(foldername, filename):
         if foldername is not None
         else filename + ".zip"
     )
+
+
+def __removesensitivecontent(documentbytes):
+    # clear metadata
+    reader2 = PyPDF2.PdfReader(BytesIO(documentbytes))
+    # Check if metadata exists.
+    if reader2.metadata is not None:
+        # Create a new PDF file without metadata.
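+        # note: a freshly constructed PdfWriter carries none of the source /Info
+        # entries (author, producer, timestamps), so copying only the pages drops
+        # the document metadata; when no metadata exists this function returns
+        # None and the caller falls back to the original bytes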
+ writer = PyPDF2.PdfWriter() + # Copy pages from the original PDF to the new PDF. + for page_num in range(len(reader2.pages)): + page = reader2.pages[page_num] + writer.add_page(page) + #writer.remove_links() # to remove comments. + buffer = BytesIO() + writer.write(buffer) + return buffer.getvalue() \ No newline at end of file diff --git a/web/public/stylesheets/webviewer.css b/web/public/stylesheets/webviewer.css index 9084bb2bc..bbf51b3c6 100644 --- a/web/public/stylesheets/webviewer.css +++ b/web/public/stylesheets/webviewer.css @@ -19,6 +19,11 @@ cursor: not-allowed !important; } +.consult_package:disabled { + color: #999999 !important; + cursor: not-allowed !important; +} + .file-upload-toast { .Toastify__toast-body { > div:last-child { diff --git a/web/src/actions/actionConstants.ts b/web/src/actions/actionConstants.ts index cbae36c3d..a93f87dce 100644 --- a/web/src/actions/actionConstants.ts +++ b/web/src/actions/actionConstants.ts @@ -20,6 +20,7 @@ const ACTION_CONSTANTS = { INC_REDACTION_LAYER: "INC_REDACTION_LAYER", SET_REQUEST_NUMBER:"SET_REQUEST_NUMBER", SET_DELETED_PAGES: "SET_DELETED_PAGES", + SET_PUBLIC_BODIES: "SET_PUBLIC_BODIES", FOI_PERSONAL_SECTIONS: "FOI_PERSONAL_SECTIONS", FOI_PERSONAL_PEOPLE: "FOI_PERSONAL_PEOPLE", FOI_PERSONAL_FILETYPES: "FOI_PERSONAL_FILETYPES", diff --git a/web/src/actions/documentActions.ts b/web/src/actions/documentActions.ts index da519f073..22b5ff4b4 100644 --- a/web/src/actions/documentActions.ts +++ b/web/src/actions/documentActions.ts @@ -1,5 +1,13 @@ import ACTION_CONSTANTS from "./actionConstants"; +type PublicBody = { + bcgovcode: string, + iaocode: string, + name: string, + isactive: boolean, + type: string, + programareaid: number +} export const setRedactionInfo = (data: any) => (dispatch:any) =>{ dispatch({ @@ -8,7 +16,6 @@ export const setRedactionInfo = (data: any) => (dispatch:any) =>{ }) } - export const setIsPageLeftOff = (data: any) => (dispatch:any) =>{ dispatch({ type:ACTION_CONSTANTS.SET_IS_PAGE_LEFT_OFF, @@ -93,6 +100,13 @@ export const setDeletedPages = (data: any) => (dispatch:any) =>{ }) } +export const setPublicBodies = (data: PublicBody[]) => (dispatch:any) =>{ + dispatch({ + type:ACTION_CONSTANTS.SET_PUBLIC_BODIES, + payload:data + }) +} + export const setFOIPersonalSections = (data: any) => (dispatch:any) =>{ dispatch({ type:ACTION_CONSTANTS.FOI_PERSONAL_SECTIONS, @@ -116,4 +130,4 @@ export const setFOIPersonalSections = (data: any) => (dispatch:any) =>{ type:ACTION_CONSTANTS.FOI_PERSONAL_VOLUMES, payload:data }) - } \ No newline at end of file + } diff --git a/web/src/apiManager/services/docReviewerService.tsx b/web/src/apiManager/services/docReviewerService.tsx index 03ed64e41..b840059e3 100644 --- a/web/src/apiManager/services/docReviewerService.tsx +++ b/web/src/apiManager/services/docReviewerService.tsx @@ -4,7 +4,7 @@ import API from "../endpoints"; import UserService from "../../services/UserService"; import { setRedactionInfo, setIsPageLeftOff, setSections, setDocumentList, setRequestStatus, setRedactionLayers, incrementLayerCount, setRequestNumber, setRequestInfo, setDeletedPages, - setFOIPersonalSections, setFOIPersonalPeople, setFOIPersonalFiletypes, setFOIPersonalVolumes + setFOIPersonalSections, setFOIPersonalPeople, setFOIPersonalFiletypes, setFOIPersonalVolumes, setPublicBodies } from "../../actions/documentActions"; import { store } from "../../services/StoreService"; import { number } from "yargs"; @@ -36,7 +36,6 @@ export const fetchDocuments = ( 
store.dispatch(setRequestNumber(res.data.requestnumber) as any); store.dispatch(setRequestStatus(res.data.requeststatuslabel) as any); store.dispatch(setRequestInfo(res.data.requestinfo) as any); - // callback(__files, res.data.documentdivisions, res.data.requestinfo); callback(res.data.documents, res.data.documentdivisions, res.data.requestinfo); } else { throw new Error(); @@ -305,6 +304,7 @@ export const fetchPageFlagsMasterData = ( .then((res:any) => { if (res.data || res.data === "") { callback(res.data); + store.dispatch(setPublicBodies(res.data.find((flag: any) => flag.name === 'Consult').programareas)); } else { throw new Error(); } @@ -544,7 +544,6 @@ export const fetchPersonalAttributes = ( httpGETRequest(apiUrlGet, {}, UserService.getToken()) .then((res:any) => { if (res.data) { - console.log("fetchPersonalAttributes: ", res.data); store.dispatch(setFOIPersonalPeople(res.data) as any); store.dispatch(setFOIPersonalFiletypes(res.data) as any); store.dispatch(setFOIPersonalVolumes(res.data) as any); diff --git a/web/src/apiManager/services/foiOSSService.tsx b/web/src/apiManager/services/foiOSSService.tsx index bb170b92a..82a79a962 100644 --- a/web/src/apiManager/services/foiOSSService.tsx +++ b/web/src/apiManager/services/foiOSSService.tsx @@ -59,6 +59,8 @@ export const getFOIS3DocumentRedlinePreSignedUrl = ( if (layertype === "oipcreview") { apiurl = apiurl + "/oipcreview" + } else if (layertype === "consult") { + apiurl = apiurl + "/consult" } else { apiurl = apiurl + "/" + layer } diff --git a/web/src/components/FOI/App.scss b/web/src/components/FOI/App.scss index f518f21da..4c00c7c64 100644 --- a/web/src/components/FOI/App.scss +++ b/web/src/components/FOI/App.scss @@ -82,6 +82,7 @@ li.modal-message-list-item { margin: 6px 0; + font-size: 15px; } #state-change-dialog-title .MuiIconButton-root { @@ -144,6 +145,12 @@ li.modal-message-list-item { height: calc(100% - 198px); } +.modal-content{ + padding: 20px 30px !important; + overflow-y: visible !important; + height: calc(100% - 198px); +} + .section-list{ >li:nth-child(odd) { background-color: #E5EAEF; @@ -221,6 +228,10 @@ li.modal-message-list-item { min-height: 350px !important; } +.consult-modal { + min-height: 600px !important; +} + .redline-checkmark { height:14px; width:14px; diff --git a/web/src/components/FOI/Home/ConfirmationModal.js b/web/src/components/FOI/Home/ConfirmationModal.js index 651cf6e8c..b4f1dc370 100644 --- a/web/src/components/FOI/Home/ConfirmationModal.js +++ b/web/src/components/FOI/Home/ConfirmationModal.js @@ -9,9 +9,10 @@ import DialogContentText from "@mui/material/DialogContentText"; import DialogTitle from "@mui/material/DialogTitle"; import CloseIcon from "@mui/icons-material/Close"; import IconButton from "@mui/material/IconButton"; +import Grid from '@mui/material/Grid'; +import { Tooltip } from '@mui/material'; //import type { ReactModalProps } from './types'; - export const ConfirmationModal= ({ cancelRedaction, redlineModalOpen, @@ -22,8 +23,16 @@ export const ConfirmationModal= ({ handleIncludeDuplicantePages, isDisableNRDuplicate, saveDoc, - modalData + modalData, + documentPublicBodies, + handleSelectedPublicBodies, + selectedPublicBodyIDs, + consultApplyRedactions, + handleApplyRedactions, + consultApplyRedlines, + handleApplyRedlines }) => { + let disableConsultSaveButton = modalData?.modalFor === "consult" && selectedPublicBodyIDs.length < 1; return ( @@ -72,11 +81,79 @@ export const ConfirmationModal= ({ /> } + {modalData?.modalFor === "consult" && + <> + + 
+          {documentPublicBodies?.map((publicBody) => {
+            return (
+              {/* markup garbled in the source: one checkbox per public body, keyed by
+                  publicBody.programareaid, checked against selectedPublicBodyIDs and
+                  wired to handleSelectedPublicBodies */}
+            )
+          })}
+          More Options:
+          {/* markup garbled in the source: "Apply Redactions" and "Apply Redlines"
+              toggles bound to consultApplyRedactions/handleApplyRedactions and
+              consultApplyRedlines/handleApplyRedlines; the consult save button is
+              disabled via disableConsultSaveButton */}
+        </>
+      }
[remaining hunks garbled in the source: the closing markup of ConfirmationModal.js and most of the new file web/src/components/FOI/Home/FeeOverrideModal.js; only these fragments survive]
+      )}
+      {isOverride && (
+        /* fee-override reason input bound to handleOverrideReasonChange; markup
+           garbled in the source */
+      )}
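+      {/* sketch of the intended flow, inferred from the call sites in Redlining.js:
+          overrideOutstandingBalance() flips isOverride, this branch then collects a
+          reason via handleOverrideReasonChange, and saveDoc() forwards the captured
+          feeOverrideReason to saveResponsePackage */}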
+  );
+};
+
+export default FeeOverrideModal;
diff --git a/web/src/components/FOI/Home/Home.js b/web/src/components/FOI/Home/Home.js
index c018633fb..233fb64dc 100644
--- a/web/src/components/FOI/Home/Home.js
+++ b/web/src/components/FOI/Home/Home.js
@@ -45,6 +45,8 @@ function Home() {
   const [warningModalOpen, setWarningModalOpen] = useState(false);
   const [divisions, setDivisions] = useState([]);
   const [pageFlags, setPageFlags]= useState([]);
+  const [isBalanceFeeOverrode, setIsBalanceFeeOverrode] = useState(false);
+  const [outstandingBalance, setOutstandingBalance] = useState(0);

   const redliningRef = useRef();
   const selectorRef = useRef();
@@ -66,17 +68,19 @@ function Home() {
     fetchDocuments(
       parseInt(foiministryrequestid),
-      async (data, documentDivisions, _requestInfo) => {
+      async (documents, documentDivisions, _requestInfo) => {
         setDivisions(documentDivisions);
+        setOutstandingBalance(_requestInfo.outstandingbalance)
+        setIsBalanceFeeOverrode(_requestInfo.balancefeeoverrodforrequest)
         const getFileExt = (filepath) => {
           const parts = filepath.split(".")
           const fileExt = parts.pop()
           return fileExt
         }
         // New code added to get the incompatable files for download redline
-        // data has all the files including incompatable ones
+        // documents has all the files including incompatible ones
         // _files has all files except incompatable ones
-        const _incompatableFiles = data.filter(
+        const _incompatableFiles = documents.filter(
           (d) => {
             const isPdfFile = getFileExt(d.filepath) === "pdf"
             if (isPdfFile) {
@@ -87,7 +91,7 @@
             }
         );
         setIncompatibleFiles(_incompatableFiles);
-        const _files = data.filter((d) => {
+        const _files = documents.filter((d) => {
           const isPdfFile = getFileExt(d.filepath) === "pdf"
           const isCompatible = !d.attributes.incompatible || isPdfFile
           return isCompatible
@@ -105,11 +109,10 @@
         });

         let doclist = [];
-        let requestInfo = _requestInfo;
+        let requestInfo = _requestInfo.requestinfo;
         getFOIS3DocumentPreSignedUrls(
           documentObjs,
           (newDocumentObjs) => {
-            console.log(requestInfo)
             sortDocList(newDocumentObjs, null, doclist, requestInfo);
             //prepareMapperObj will add sortorder, stitchIndex and totalPageCount to doclist
             //and prepare the PageMappedDocs object
@@ -300,9 +303,10 @@ function Home() {
           incompatibleFiles={incompatibleFiles}
           setWarningModalOpen={setWarningModalOpen}
           scrollLeftPanel={scrollLeftPanel}
+          isBalanceFeeOverrode={isBalanceFeeOverrode}
+          outstandingBalance={outstandingBalance}
           pageFlags={pageFlags}
           syncPageFlagsOnAction={syncPageFlagsOnAction}
-
         />
       ) // :
Loading
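(Illustrative sketch, not part of this changeset: how the new outstandingBalance and isBalanceFeeOverrode props are expected to gate final-package creation. The body of handleFinalPackageClick is not shown in this diff; the branch below is inferred from its call site and from cancelRedaction/cancelSaveRedlineDoc in Redlining.js, and the modal fields are assumed.)

// illustrative sketch only; signature taken from the onclick wiring in Redlining.js
const handleFinalPackageClick = (updateModalData, setRedlineModalOpen, outstandingBalance,
    isBalanceFeeOverrode, setOutstandingBalanceModal, setIsOverride) => {
  if (outstandingBalance > 0 && !isBalanceFeeOverrode) {
    // fees still owing and never overridden: ask for an override reason first
    setIsOverride(false);
    setOutstandingBalanceModal(true);
  } else {
    // no balance outstanding (or already overridden): straight to the package modal
    updateModalData({ modalFor: "responsepackage" }); // modal fields assumed
    setRedlineModalOpen(true);
  }
};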
diff --git a/web/src/components/FOI/Home/MCFPersonal.js b/web/src/components/FOI/Home/MCFPersonal.js index 258ec1019..173ccb264 100644 --- a/web/src/components/FOI/Home/MCFPersonal.js +++ b/web/src/components/FOI/Home/MCFPersonal.js @@ -26,7 +26,7 @@ const MCFPersonal = ({ setEditTagModalOpen, setOpenContextPopup, setNewDivision, - // tagValue, + comparePersonalAttributes, curPersonalAttributes, setNewPersonalAttributes, updatePersonalAttributes, @@ -69,16 +69,29 @@ const MCFPersonal = ({ const [fileTypeSearchValue, setFileTypeSearchValue] = useState(""); const [additionalFileTypes, setAdditionalFileTypes] = useState([]); const [showAdditionalFileTypes, setShowAdditionalFileTypes] = useState(false); + const [disableSave, setDisableSave] = useState(false); useEffect(() => { setPersonalAttributes(curPersonalAttributes); },[curPersonalAttributes]) + useEffect(() => { + setDisableSave( + personalAttributes?.person === undefined + || personalAttributes?.person === "" + || personalAttributes?.filetype === undefined + || personalAttributes?.filetype === "" + || personalAttributes?.trackingid === undefined + || personalAttributes?.trackingid === "" + || comparePersonalAttributes(personalAttributes, curPersonalAttributes) + ); + },[personalAttributes]) + useEffect(() => { if(MCFSections?.sections) { if(MCFSections.sections.length > MCFPopularSections-1) { setTagList(MCFSections.sections.slice(0, MCFPopularSections-1)); - setOtherTagList(MCFSections.sections.slice(MCFPopularSections)); + setOtherTagList(MCFSections.sections.slice(MCFPopularSections-1)); } else { setTagList(MCFSections.sections); setOtherTagList([]); @@ -97,7 +110,7 @@ const MCFPersonal = ({ },[MCFPeople]) useEffect(() => { - if(MCFVolumes?.volumes) { + if(!!MCFFiletypes && MCFVolumes?.volumes) { if(MCFFiletypes.filetypes.length > 5) { setVolumes(MCFVolumes.volumes.slice(0, 5)); } else { @@ -109,8 +122,8 @@ const MCFPersonal = ({ useEffect(() => { if(MCFFiletypes?.filetypes) { if(MCFFiletypes.filetypes.length > 6) { - setFileTypes(MCFFiletypes.filetypes.slice(0, 6)); - setOtherFileTypes(MCFFiletypes.filetypes.slice(6, MCFFiletypes.filetypes.length)) + setFileTypes(MCFFiletypes.filetypes.slice(0, 8)); + setOtherFileTypes(MCFFiletypes.filetypes.slice(8, MCFFiletypes.filetypes.length)) } else { setFileTypes(MCFFiletypes.filetypes); setOtherFileTypes([]) @@ -152,6 +165,16 @@ const MCFPersonal = ({ } },[showAllPeople, showAllVolumes]) + React.useEffect(() => { + if(MCFPeople.people.length > 0 && personalAttributes.person !== "") { + setShowAllPeople( MCFPeople.people.filter(p => p.name==personalAttributes.person)[0]?.sortorder >= 5 ); + } + + if(MCFVolumes.volumes.length > 0 && personalAttributes.volume !== "") { + setShowAllVolumes( MCFVolumes.volumes.filter(v => v.name==personalAttributes.volume)[0]?.sortorder >= 5 ); + } + },[personalAttributes]) + React.useEffect(() => { setAdditionalFileTypes(searchFileTypes(otherFileTypes, fileTypeSearchValue, personalAttributes?.filetype)); },[fileTypeSearchValue, otherFileTypes, personalAttributes]) @@ -166,7 +189,7 @@ const MCFPersonal = ({ _sectionArray.map((section) => { if(_keyword && section.name.toLowerCase().includes(_keyword.toLowerCase())) { newSectionArray.push(section); - } else if(section.divisionid === _selectedSectionValue) { + } else if(section.name === _selectedSectionValue) { newSectionArray.unshift(section); } }); @@ -221,6 +244,8 @@ const MCFPersonal = ({ }; const handleClose = () => { + setSearchValue(""); + setFileTypeSearchValue(""); setCurrentEditRecord(); 
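    // resetting both search keywords here means the next open of the edit-tag
    // modal starts with unfiltered section and file-type lists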
setCurPersonalAttributes({ person: "", @@ -234,6 +259,11 @@ const MCFPersonal = ({ setOpenContextPopup(false); }; + const reset = () => { + setSearchValue(""); + setFileTypeSearchValue(""); + }; + const handleFileTypeSearchKeywordChange = (keyword) => { setFileTypeSearchValue(keyword); } @@ -583,13 +613,15 @@ const MCFPersonal = ({ diff --git a/web/src/components/FOI/Home/Redlining.js b/web/src/components/FOI/Home/Redlining.js index 3fd5fc417..4b02a814c 100644 --- a/web/src/components/FOI/Home/Redlining.js +++ b/web/src/components/FOI/Home/Redlining.js @@ -53,10 +53,12 @@ import { createFinalPackageSelection, createOIPCForReviewSelection, createRedlineForSignOffSelection, - createResponsePDFMenu, + createResponsePDFMenu, + createConsultPackageSelection, handleFinalPackageClick, handleRedlineForOipcClick, handleRedlineForSignOffClick, + handleConsultPackageClick, renderCustomButton, isValidRedlineDownload, isReadyForSignOff } from "./CreateResponsePDF/CreateResponsePDF"; @@ -66,6 +68,7 @@ import {ConfirmationModal} from "./ConfirmationModal"; import { FOIPPASectionsModal } from "./FOIPPASectionsModal"; import { NRWarningModal } from "./NRWarningModal"; import Switch from "@mui/material/Switch"; +import FeeOverrideModal from "./FeeOverrideModal"; const Redlining = React.forwardRef( ( @@ -81,8 +84,10 @@ const Redlining = React.forwardRef( incompatibleFiles, setWarningModalOpen, scrollLeftPanel, + isBalanceFeeOverrode, + outstandingBalance, pageFlags, - syncPageFlagsOnAction + syncPageFlagsOnAction, }, ref ) => { @@ -101,6 +106,7 @@ const Redlining = React.forwardRef( const currentLayer = useSelector((state) => state.documents?.currentLayer); const deletedDocPages = useAppSelector((state) => state.documents?.deletedDocPages); const validoipcreviewlayer = useAppSelector((state) => state.documents?.requestinfo?.validoipcreviewlayer); + const requestType = useAppSelector((state) => state.documents?.requestinfo?.requesttype); const viewer = useRef(null); const [documentList, setDocumentList] = useState([]); @@ -137,9 +143,15 @@ const Redlining = React.forwardRef( const [modalData, setModalData] = useState(null); const [enableRedactionPanel, setEnableRedactionPanel] = useState(false); const [clickRedactionPanel, setClickRedactionPanel] = useState(false); + const [pagesRemoved, setPagesRemoved] = useState([]); const [redlineModalOpen, setRedlineModalOpen] = useState(false); const [isDisableNRDuplicate, setIsDisableNRDuplicate] = useState(false); + const [pageSelectionsContainNRDup, setPageSelectionsContainNRDup] = useState(false); + const [outstandingBalanceModal, setOutstandingBalanceModal] = useState(false); + const [isOverride, setIsOverride]= useState(false); + const [feeOverrideReason, setFeeOverrideReason]= useState(""); + //xml parser const parser = new XMLParser(); /**Response Package && Redline download and saving logic (react custom hooks)*/ @@ -151,10 +163,19 @@ const Redlining = React.forwardRef( saveRedlineDocument, enableSavingOipcRedline, enableSavingRedline, + enableSavingConsults, checkSavingRedline, checkSavingOIPCRedline, + checkSavingConsults, setRedlineCategory, setFilteredComments, + setSelectedPublicBodyIDs, + setConsultApplyRedactions, + selectedPublicBodyIDs, + documentPublicBodies, + consultApplyRedactions, + setConsultApplyRedlines, + consultApplyRedlines, } = useSaveRedlineForSignoff(docInstance, docViewer); const { saveResponsePackage, @@ -231,6 +252,7 @@ const Redlining = React.forwardRef( const redlineForSignOffBtn = createRedlineForSignOffSelection(document, 
enableSavingRedline); const redlineForOipcBtn = createOIPCForReviewSelection(document, enableSavingOipcRedline); const finalPackageBtn = createFinalPackageSelection(document, enableSavingFinal); + const consultPackageButton = createConsultPackageSelection(document, enableSavingConsults); redlineForOipcBtn.onclick = () => { handleRedlineForOipcClick(updateModalData, setRedlineModalOpen); }; @@ -238,11 +260,16 @@ const Redlining = React.forwardRef( handleRedlineForSignOffClick(updateModalData, setRedlineModalOpen); }; finalPackageBtn.onclick = () => { - handleFinalPackageClick(updateModalData, setRedlineModalOpen); + handleFinalPackageClick(updateModalData, setRedlineModalOpen, outstandingBalance, + isBalanceFeeOverrode,setOutstandingBalanceModal,setIsOverride); + }; + consultPackageButton.onclick = () => { + handleConsultPackageClick(updateModalData, setRedlineModalOpen, setIncludeDuplicatePages, setIncludeNRPages) }; menu.appendChild(redlineForOipcBtn); menu.appendChild(redlineForSignOffBtn); menu.appendChild(finalPackageBtn); + menu.appendChild(consultPackageButton); parent.appendChild(menu); //Create render function to render custom Create Reseponse PDF button @@ -444,12 +471,12 @@ const Redlining = React.forwardRef( }) var x = 0, y = 0 - documentViewer.addEventListener("mouseLeftDown", async (event) => { + documentViewer.addEventListener("mouseRightDown", async (event) => { x = event.pageX; y = event.pageY; }); - documentViewer.addEventListener("mouseLeftUp", async (event) => { + documentViewer.addEventListener("mouseRightUp", async (event) => { if (window.Math.abs(event.pageX - x) < 2 && window.Math.abs(event.pageY - y) < 2) { scrollLeftPanel(event, documentViewer.getCurrentPage()); } @@ -591,6 +618,7 @@ const Redlining = React.forwardRef( }, []); const updateModalData = (newModalData) => { + setRedlineCategory(newModalData.modalFor); setModalData(newModalData); }; @@ -924,7 +952,7 @@ const Redlining = React.forwardRef( docversion: displayedDoc.docversion, isFullPage: isFullPage } - const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "delete"); + const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "delete", pageFlags); if (pageFlagsUpdated) { pageFlagObj.push(pageFlagsUpdated); } @@ -991,7 +1019,7 @@ const Redlining = React.forwardRef( let individualPageNo; await removeRedactAnnotationDocContent(annotations); - + if (annotations[0].Subject === "Redact") { let pageSelectionList = [...pageSelections]; annots[0].children?.forEach((annotatn, i) => { @@ -1088,7 +1116,7 @@ const Redlining = React.forwardRef( docid: displayedDoc.docid, docversion: displayedDoc.docversion, } - const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "add"); + const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "add", pageFlags); if (pageFlagsUpdated) { pageFlagObj.push(pageFlagsUpdated); } @@ -1383,6 +1411,7 @@ const Redlining = React.forwardRef( const validRedlineDownload = isValidRedlineDownload(pageFlags); const redlineReadyAndValid = readyForSignOff && validRedlineDownload; const oipcRedlineReadyAndValid = (validoipcreviewlayer === true && currentLayer.name.toLowerCase() === "oipc") && readyForSignOff; + checkSavingConsults(documentList, _instance); checkSavingRedline(redlineReadyAndValid, _instance); 
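      // each check* call toggles its entry in the Create Response PDF menu:
      // checkSavingConsults works from the loaded documentList, while the redline,
      // OIPC and final-package checks key off redlineReadyAndValid and
      // oipcRedlineReadyAndValid computed above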
checkSavingOIPCRedline(oipcRedlineReadyAndValid, _instance, readyForSignOff); checkSavingFinalPackage(redlineReadyAndValid, _instance); @@ -1396,7 +1425,7 @@ const Redlining = React.forwardRef( if (docInstance && documentList.length > 0) { const document = docInstance?.UI.iframeWindow.document; document.getElementById("create_response_pdf").addEventListener("click", handleCreateResponsePDFClick); - docViewer.setWatermark({ + docViewer?.setWatermark({ // Draw custom watermark in middle of the document custom: (ctx, pageNumber, pageWidth, pageHeight) => { // ctx is an instance of CanvasRenderingContext2D @@ -1404,8 +1433,9 @@ const Redlining = React.forwardRef( // Hence being able to leverage those properties let originalPage = pageMappedDocs['stitchedPageLookup'][pageNumber] let doc = pageFlags.find(d => d.documentid === originalPage.docid); - let pageFlag = doc.pageflag.find(f => f.page === originalPage.page); - if (pageFlag.flagid === pageFlagTypes["Duplicate"]) { + let pageFlagsOnPage = doc?.pageflag?.filter(f => f.page === originalPage.page); + let NrOrDupeFlag = pageFlagsOnPage?.find(pageFlagItem => pageFlagItem.flagid === pageFlagTypes["Duplicate"] || pageFlagItem.flagid === pageFlagTypes["Not Responsive"]); + if (NrOrDupeFlag?.flagid === pageFlagTypes["Duplicate"]) { ctx.fillStyle = "#ff0000"; ctx.font = "20pt Arial"; ctx.globalAlpha = 0.4; @@ -1417,7 +1447,7 @@ const Redlining = React.forwardRef( ctx.restore(); } - if (pageFlag.flagid === pageFlagTypes["Not Responsive"]) { + if (NrOrDupeFlag?.flagid === pageFlagTypes["Not Responsive"]) { ctx.fillStyle = "#ff0000"; ctx.font = "20pt Arial"; ctx.globalAlpha = 0.4; @@ -1430,6 +1460,8 @@ const Redlining = React.forwardRef( } }, }); + docViewer?.refreshAll(); + docViewer?.updateView(); } //Cleanup Function: removes previous event listeiner to ensure handleCreateResponsePDFClick event is not called multiple times on click return () => { @@ -1523,6 +1555,9 @@ const Redlining = React.forwardRef( let username = docViewer?.getAnnotationManager()?.getCurrentUser(); for (const entry in annotData) { let xml = parser.parseFromString(annotData[entry]); + // import redactions first, free text later, so translucent redaction won't cover free text + let xmlAnnotsChildren_redaction = []; + let xmlAnnotsChildren_others = []; for (let annot of xml.getElementsByTagName("annots")[0].children) { let txt = domParser.parseFromString( annot.getElementsByTagName("trn-custom-data")[0].attributes.bytes, @@ -1536,6 +1571,12 @@ const Redlining = React.forwardRef( (p) => p.pageNo - 1 === Number(originalPageNo) )?.stitchedPageNo - 1 )?.toString(); + if(annot.attributes.subject === "Redact") { + xmlAnnotsChildren_redaction.push(annot); + } else { + xmlAnnotsChildren_others.push(annot); + } + xml.getElementsByTagName("annots")[0].children = [...xmlAnnotsChildren_redaction, ...xmlAnnotsChildren_others]; } xml = parser.toString(xml); const _annotations = await annotManager.importAnnotations(xml); @@ -1701,7 +1742,7 @@ const Redlining = React.forwardRef( docid: displayedDoc.docid, docversion: displayedDoc.docversion, } - const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "edit"); + const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "edit", pageFlags); if (pageFlagsUpdated) { pageFlagObj.push(pageFlagsUpdated); } @@ -1882,7 +1923,7 @@ const Redlining = React.forwardRef( pageSelectionList ); const pageFlagObj = []; - 
const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "edit"); + const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "edit", pageFlags); if (pageFlagsUpdated) { pageFlagObj.push(pageFlagsUpdated); } @@ -2180,8 +2221,14 @@ const Redlining = React.forwardRef( }, [deleteQueue, newRedaction]); const cancelRedaction = () => { - setModalOpen(false); - setMessageModalOpen(false); + if(outstandingBalance > 0 && !isBalanceFeeOverrode){ + setIsOverride(false) + setOutstandingBalanceModal(false) + } + else{ + setModalOpen(false); + setMessageModalOpen(false); + } setSelectedPageFlagId(null); setSelectedSections([]); setSaveDisabled(true); @@ -2228,13 +2275,48 @@ const Redlining = React.forwardRef(
,
        ],
      });
-      setMessageModalOpen(true)
+      setMessageModalOpen(true);
+    }
+
+    const setMessageModalForNrDuplicatePriority = () => {
+      updateModalData({
+        modalTitle: "Selected page(s) currently have NR or Duplicate flag applied",
+        modalMessage: [
+          <div> </div>,
+          <div> • Please note, your redaction(s) have been applied on your selected page(s). However, to flag your selected page(s) as Withheld In Full, you must first change your selected page(s) flags to In Progress.</div>,
+          <div> </div>,
+          <div> • After your selected page(s) are flagged as In Progress you may proceed to mark them as Withheld in Full.</div>,
+ ], + }); } useEffect(() => { if (!newRedaction) return; const astrType = decodeAstr(newRedaction.astr)['trn-redaction-type'] || ''; const hasFullPageRedaction = astrType === "fullPage"; + // logic to alert the user that a withheld in full pageflag/redaction was applied to a page with an existing duplicate or nr pageflag. + let hasNROrDuplicateFlag = false; + if (selectedPageFlagId === pageFlagTypes["Withheld in Full"] || hasFullPageRedaction) { + const pageFlagsMap = new Map(); + for (let docPageFlags of pageFlags) { + pageFlagsMap.set(docPageFlags.documentid, docPageFlags.pageflag); + } + for (let pageObj of pageSelections) { + if (hasNROrDuplicateFlag) { + break; + } + const pageFlagList = pageFlagsMap.get(pageObj.docid); + if (pageFlagList) { + for (let flagObj of pageFlagList) { + if (flagObj.page === pageObj.page && (flagObj.flagid === pageFlagTypes["Not Responsive"] || flagObj.flagid === pageFlagTypes["Duplicate"])) { + hasNROrDuplicateFlag = true; + break; + } + } + } + } + } + setPageSelectionsContainNRDup(hasNROrDuplicateFlag); if (newRedaction.names?.length > REDACTION_SELECT_LIMIT) { setWarningModalOpen(true); @@ -2243,6 +2325,9 @@ const Redlining = React.forwardRef( saveRedaction(); } else if (defaultSections.length == 0 && !hasFullPageRedaction) { setModalOpen(true); + } else if (hasNROrDuplicateFlag) { + setModalOpen(true); + setMessageModalForNrDuplicatePriority(); } else if (selectedPageFlagId === pageFlagTypes["Withheld in Full"] && defaultSections.length > 0) { setMessageModalForNotResponsive(); } else if (hasFullPageRedaction) { @@ -2280,6 +2365,15 @@ const Redlining = React.forwardRef( const cancelSaveRedlineDoc = () => { disableNRDuplicate(); setRedlineModalOpen(false); + setSelectedPublicBodyIDs([]); + setConsultApplyRedactions(false); + setConsultApplyRedlines(false); + if(outstandingBalance > 0 && !isBalanceFeeOverrode){ + setOutstandingBalanceModal(false) + setIsOverride(false) + } + else + setRedlineModalOpen(false); }; const handleIncludeNRPages = (e) => { @@ -2289,8 +2383,35 @@ const Redlining = React.forwardRef( const handleIncludeDuplicantePages = (e) => { setIncludeDuplicatePages(e.target.checked); }; + + const handleApplyRedactions = (e) => { + setConsultApplyRedactions(e.target.checked); + } + + const handleApplyRedlines = (e) => { + setConsultApplyRedlines(e.target.checked); + if (consultApplyRedactions) { + setConsultApplyRedactions(false); + } + } + + const handleSelectedPublicBodies = (e) => { + let publicBodyId = !isNaN(parseInt(e.target.value)) ? parseInt(e.target.value) : e.target.value; + if (selectedPublicBodyIDs.includes(publicBodyId)) { + setSelectedPublicBodyIDs((prev) => { + return [...prev.filter(id => id !== publicBodyId)] + }); + } + else { + setSelectedPublicBodyIDs((prev) => { + return [...prev, publicBodyId] + }); + } + } const saveDoc = () => { + setIsOverride(false) + setOutstandingBalanceModal(false) setRedlineModalOpen(false); setRedlineSaving(true); let modalFor= modalData? 
modalData.modalFor : "" @@ -2300,6 +2421,7 @@ const Redlining = React.forwardRef( switch (modalFor) { case "oipcreview": case "redline": + case "consult": saveRedlineDocument( docInstance, modalFor, @@ -2316,7 +2438,9 @@ const Redlining = React.forwardRef( docInstance, documentList, pageMappedDocs, - pageFlags + pageFlags, + feeOverrideReason, + requestType, ); break; default: @@ -2334,7 +2458,7 @@ const Redlining = React.forwardRef( return trnCustomData } - const NRID = sections?.find(s => s.section === "Not Responsive")?.id; + const NRID = sections?.find(s => s.section === "NR")?.id; const blankID = sections?.find(s => s.section === "")?.id; const sectionIsDisabled = (sectionid) => { @@ -2373,6 +2497,13 @@ const Redlining = React.forwardRef( return isDisabled } + const overrideOutstandingBalance = () => { + setIsOverride(true) + } + const handleOverrideReasonChange = (event) => { + setFeeOverrideReason(event.target.value); + }; + return (
@@ -2385,6 +2516,8 @@ const Redlining = React.forwardRef( handleSectionSelected={handleSectionSelected} editRedacts={editRedacts} saveRedactions={saveRedactions} + pageSelectionsContainNRDup={pageSelectionsContainNRDup} + setMessageModalOpen={setMessageModalOpen} saveDisabled={saveDisabled} saveRedaction={saveRedaction} defaultSections={defaultSections} @@ -2403,6 +2536,13 @@ const Redlining = React.forwardRef( isDisableNRDuplicate={isDisableNRDuplicate} saveDoc={saveDoc} modalData={modalData} + documentPublicBodies={documentPublicBodies} + handleSelectedPublicBodies={handleSelectedPublicBodies} + selectedPublicBodyIDs={selectedPublicBodyIDs} + consultApplyRedactions={consultApplyRedactions} + handleApplyRedactions={handleApplyRedactions} + handleApplyRedlines={handleApplyRedlines} + consultApplyRedlines={consultApplyRedlines} /> } {messageModalOpen && @@ -2412,6 +2552,17 @@ const Redlining = React.forwardRef( modalData={modalData} /> } +
); } diff --git a/web/src/components/FOI/Home/utils.js b/web/src/components/FOI/Home/utils.js index dbcf9974e..5f7e7dda0 100644 --- a/web/src/components/FOI/Home/utils.js +++ b/web/src/components/FOI/Home/utils.js @@ -73,13 +73,16 @@ export const CFDSorting = (a, b) => { b = b.file; } if (a.attributes.personalattributes.person !== b.attributes.personalattributes.person) { - return (a.attributes.personalattributes.person > b.attributes.personalattributes.person) ? 1 : -1 + // return (a.attributes.personalattributes.person > b.attributes.personalattributes.person) ? 1 : -1 + return a.attributes.personalattributes.person.localeCompare(b.attributes.personalattributes.person, undefined, {numeric: true, sensitivity: 'base'}) } else if (a.attributes.personalattributes.filetype !== b.attributes.personalattributes.filetype) { return (a.attributes.personalattributes.filetype > b.attributes.personalattributes.filetype) ? 1 : -1 } else if (a.attributes.personalattributes.trackingid !== b.attributes.personalattributes.trackingid) { - return (a.attributes.personalattributes.trackingid > b.attributes.personalattributes.trackingid) ? 1 : -1 + // return (a.attributes.personalattributes.trackingid > b.attributes.personalattributes.trackingid) ? 1 : -1 + return a.attributes.personalattributes.trackingid.localeCompare(b.attributes.personalattributes.trackingid, undefined, {numeric: true, sensitivity: 'base'}) } else if (a.attributes.personalattributes.volume !== b.attributes.personalattributes.volume) { - return (a.attributes.personalattributes.volume > b.attributes.personalattributes.volume) ? 1 : -1 + // return (a.attributes.personalattributes.volume > b.attributes.personalattributes.volume) ? 1 : -1 + return a.attributes.personalattributes.volume?a.attributes.personalattributes.volume.localeCompare(b.attributes.personalattributes.volume, undefined, {numeric: true, sensitivity: 'base'}) : -1 } return Date.parse(a.created_at) - Date.parse(b.created_at); }; @@ -106,7 +109,7 @@ export const sortDocList = (fullDocList, currentDoc, sortedDocList, requestInfo) if (childDocList.length == 1) { sortedChildDocList = childDocList; } else { - if (requestInfo.bcgovcode === "MCF") { + if (requestInfo?.bcgovcode === "MCF" && requestInfo?.requesttype === "personal") { sortedChildDocList = childDocList.sort(CFDSorting); } else { sortedChildDocList = childDocList.sort(docSorting); @@ -421,7 +424,8 @@ const constructPageFlagsForDelete = ( exisitngAnnotations, displayedDoc, pageFlagTypes, - redactionType + redactionType, + pageFlags ) => { let pagesToUpdate = {}; let found = false; @@ -432,7 +436,16 @@ const constructPageFlagsForDelete = ( (_annotation) => _annotation.getCustomData("trn-redaction-type") == "fullPage" ); - // full page redaction is always have first priority + // NR / Duplicate pageflags takes the first precedence / priority + const foundNROrDuplicateFlagObj = findNROrDuplicatePageFlag(pageFlags, displayedDoc, pageFlagTypes); + if (foundNROrDuplicateFlagObj) { + return { + docid: displayedDoc?.docid, + page: displayedDoc?.page, + flagid: foundNROrDuplicateFlagObj.flagid + }; + } + // full page redaction is the next priority / precedence if (fullPageRedaction.length > 0) { const fullPageSectionsStr = fullPageRedaction[0].getCustomData("sections"); const fullPageSectionValue = getSectionValue(fullPageSectionsStr); @@ -500,14 +513,27 @@ const constructPageFlagsForAddOrEdit = ( annotationsInfo, exisitngAnnotations, displayedDoc, - pageFlagTypes + pageFlagTypes, + pageFlags ) => { let pagesToUpdate = {}; + if 
(annotationsInfo.section === undefined) { + return getValidObject(pagesToUpdate); // non redaction annotations do not need page flags automatically applied + } const foundBlank = ["", " "].includes(annotationsInfo.section); const foundNR = annotationsInfo.section == "NR"; // section with a valid number found const foundValidSection = !["", " ", "NR"].includes(annotationsInfo.section); - // add/edit - fullPage takes the precedence + // add/edit - NR / Duplicate pageflags takes the first precedence + const foundNROrDuplicateFlagObj = findNROrDuplicatePageFlag(pageFlags, displayedDoc, pageFlagTypes); + if (foundNROrDuplicateFlagObj) { + return { + docid: displayedDoc?.docid, + page: displayedDoc?.page, + flagid: foundNROrDuplicateFlagObj.flagid + }; + } + // add/edit - fullPage takes the next precedence if (annotationsInfo?.redactiontype === "fullPage") { // addition of full page redaction with blank code return "In Progress" page flag. if (foundBlank) { @@ -624,12 +650,14 @@ export const constructPageFlags = ( pageMappedDocs, pageFlagTypes, RedactionTypes, - action = "" + action = "", + pageFlags = [] ) => { - // 1. always withheld in full takes precedence - // 2. then, partial disclosure - // 3. then, NR (full disclosure) - // 4. lastly, BLANK (in progress) + // 1. NR/Dup pageflag takes precedence. If that page flag is applied, no annots made can adjust pageflag + // 2. then, withheld in full takes precedence + // 3. then, partial disclosure + // 4. then, NR (full disclosure) + // 5. lastly, BLANK (in progress) const displayedDoc = pageMappedDocs.stitchedPageLookup[Number(annotationsInfo.stitchpage) + 1]; // get exisitng FreeText annotations on the page @@ -643,7 +671,8 @@ export const constructPageFlags = ( annotationsInfo, _exisitngAnnotations, displayedDoc, - pageFlagTypes + pageFlagTypes, + pageFlags ); } else if (action === "delete") { const redactionType = getRedactionType( @@ -655,14 +684,16 @@ export const constructPageFlags = ( _exisitngAnnotations, displayedDoc, pageFlagTypes, - redactionType + redactionType, + pageFlags ); } else { return constructPageFlagsForAddOrEdit( annotationsInfo, _exisitngAnnotations, displayedDoc, - pageFlagTypes + pageFlagTypes, + pageFlags, ); } }; @@ -752,3 +783,15 @@ export const skipNRDocument = (documentPageFlags, pagecount, pageFlagTypes) => { } return skipdocument; } + +export const findNROrDuplicatePageFlag = (pageFlags, docObj, pageFlagTypes) => { + const docPageFlags = pageFlags.find(pageFlagObj => pageFlagObj.documentid === docObj.docid); + if (!docPageFlags) { + return false; + } + for (let pageFlag of docPageFlags.pageflag) { + if ((pageFlag.page === docObj.page && pageFlag.flagid === pageFlagTypes["Duplicate"]) || (pageFlag.page === docObj.page && pageFlag.flagid === pageFlagTypes["Not Responsive"])) { + return pageFlag; + } + } +} diff --git a/web/src/constants/constants.ts b/web/src/constants/constants.ts index d101b000b..1a76564a0 100644 --- a/web/src/constants/constants.ts +++ b/web/src/constants/constants.ts @@ -10,7 +10,7 @@ export const KEYCLOAK_REALM = export const KEYCLOAK_URL = window._env_?.REACT_APP_KEYCLOAK_URL ?? process.env.REACT_APP_KEYCLOAK_URL ?? "https://dev.oidc.gov.bc.ca"; export const KEYCLOAK_AUTH_URL = `${KEYCLOAK_URL}/auth`; export const ANONYMOUS_USER = "anonymous"; -export const SESSION_SECURITY_KEY = "u7x!A%D*G-KaNdRgUkXp2s5v8y/B?E(H"; +export const SESSION_SECURITY_KEY = window._env_?.REACT_APP_SESSION_SECURITY_KEY ?? 
process.env.REACT_APP_SESSION_SECURITY_KEY; export const SESSION_LIFETIME = 21600000; export const PDFVIEWER_DISABLED_FEATURES= window._env_?.REACT_APP_PDFVIEWERDISABLED ?? process.env.REACT_APP_PDFVIEWERDISABLED ?? diff --git a/web/src/constants/enum.ts b/web/src/constants/enum.ts index 2338be921..db5f0f832 100644 --- a/web/src/constants/enum.ts +++ b/web/src/constants/enum.ts @@ -100,7 +100,7 @@ const RedactionTypes: RedactionType = { "blank": "blank" }; -const MCFPopularSections = 23 +const MCFPopularSections = 21 export { KCProcessingTeams, diff --git a/web/src/modules/documentReducer.ts b/web/src/modules/documentReducer.ts index 302df096f..be98b7a97 100644 --- a/web/src/modules/documentReducer.ts +++ b/web/src/modules/documentReducer.ts @@ -8,7 +8,8 @@ const initialState = { "description": "Redline", // "sortorder": 1, // "count": 0 - } + }, + allPublicBodies: [], } const documents = (state = initialState, action:any)=> { @@ -41,6 +42,8 @@ const documents = (state = initialState, action:any)=> { return {...state, redactionLayers: state.redactionLayers }; case ACTION_CONSTANTS.SET_DELETED_PAGES: return {...state, deletedDocPages: action.payload}; + case ACTION_CONSTANTS.SET_PUBLIC_BODIES: + return {...state, allPublicBodies: action.payload}; case ACTION_CONSTANTS.FOI_PERSONAL_SECTIONS: return { ...state, foiPersonalSections: action.payload }; case ACTION_CONSTANTS.FOI_PERSONAL_PEOPLE: