diff --git a/.gitignore b/.gitignore index f9395bcd2..3af96fa00 100644 --- a/.gitignore +++ b/.gitignore @@ -102,3 +102,5 @@ computingservices/ZippingServices/env/* openshift/templates/zippingservice/zipper.env *.locenv +MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/result_*.pdf +MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/SourceFiles/result_*.pdf diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDF/CalendarFileProcessor.cs b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDF/CalendarFileProcessor.cs index fdf2d03c7..079fb37be 100644 --- a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDF/CalendarFileProcessor.cs +++ b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDF/CalendarFileProcessor.cs @@ -147,14 +147,22 @@ public CalendarFileProcessor(Stream sourceStream) string organizer = string.Empty; //Organizer Name and Email - if (e.Organizer != null) + if (e.Organizer?.Value != null) { - organizer = e.Organizer.CommonName + "(" + e.Organizer.Value.AbsoluteUri + ")"; + try + { + organizer = e.Organizer?.CommonName + "(" + e.Organizer?.Value.AbsoluteUri + ")"; + } + catch + { + + organizer = @"Unknown Organizer"; + } } else { - organizer = @"Unknown Organizer(mailto:unknownorganizer@calendar.google.com)"; + organizer = @"Unknown Organizer(mailto:unknownorganizer@calendar.bcgov.ca)"; } htmlString.Append(@" From: @@ -174,7 +182,7 @@ public CalendarFileProcessor(Stream sourceStream) //Meeting created timestamp htmlString.Append(@" Sent: - " + e.DtStamp.Date + ""); + " + e.DtStamp.Value + ""); //Priority htmlString.Append(@" @@ -184,12 +192,12 @@ public CalendarFileProcessor(Stream sourceStream) //Meeting Start Timestamp htmlString.Append(@" Start Time: - " + e.DtStart.Date + ""); + " + e.DtStart.Value + ""); //Meeting End Timestamp htmlString.Append(@" End Time: - " + e.DtEnd.Date + ""); + " + e.DtEnd.Value + ""); //Meeting Message string message = @"" + e.Description?.Replace("\n", "
"); message = message.Replace("<br>", "
").Replace("<br/>", "
"); @@ -243,6 +251,22 @@ public CalendarFileProcessor(Stream sourceStream) } + + private DateTime GetPSTTime(DateTime _timetoconvert) + { + DateTime converteddate = _timetoconvert; + if (TimeZone.CurrentTimeZone.StandardName != "Pacific Standard Time" || _timetoconvert.Kind == DateTimeKind.Utc ) + { + + converteddate = TimeZoneInfo.ConvertTimeBySystemTimeZoneId(converteddate, "Pacific Standard Time"); + + } + + return converteddate; + } + + + /// /// Converts HTML string to PDF using syncfution library and blink engine /// diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/CalendarFileProcessorTest.cs b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/CalendarFileProcessorTest.cs index e457a0e70..76ec57c30 100644 --- a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/CalendarFileProcessorTest.cs +++ b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/CalendarFileProcessorTest.cs @@ -42,7 +42,7 @@ public void ProcessSimpleCalendarFilesTest() Dictionary> attachments = new Dictionary>(); string rootFolder = getSourceFolder(); Stream output = new MemoryStream(); - Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "test-cal.ics"), FileMode.Open, FileAccess.Read); + Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "Backlog refinement.ics"), FileMode.Open, FileAccess.Read); CalendarFileProcessor calendarFileProcessor = new CalendarFileProcessor(testFile); calendarFileProcessor.WaitTimeinMilliSeconds = 5000; @@ -50,6 +50,8 @@ public void ProcessSimpleCalendarFilesTest() string outputPath = Path.Combine(getSourceFolder(), "output"); (isProcessed, message, output, attachments) = calendarFileProcessor.ProcessCalendarFiles(); Assert.IsTrue(isProcessed == true, $"Calendar to PDF Conversion failed"); + + SaveStreamAsFile(getSourceFolder(), output, "result_Backlog refinement.pdf"); } [TestMethod] @@ -60,7 +62,7 @@ public void ProcessCalendarFileWithAttachmentsTest() Dictionary> attachments = new Dictionary>(); 
string rootFolder = getSourceFolder(); Stream output = new MemoryStream(); - Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "test-with-attachments.ics"), FileMode.Open, FileAccess.Read); + Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "Backlog refinement.ics"), FileMode.Open, FileAccess.Read); CalendarFileProcessor calendarFileProcessor = new CalendarFileProcessor(testFile); calendarFileProcessor.WaitTimeinMilliSeconds = 5000; @@ -71,6 +73,8 @@ public void ProcessCalendarFileWithAttachmentsTest() bool isAttachmentsExists = attachments.Count == 2; Assert.IsTrue(isAttachmentsExists, $"Attachments not found"); + + SaveStreamAsFile(getSourceFolder(), output, "result_Backlog refinement.pdf"); } [TestMethod] @@ -81,18 +85,34 @@ public void ProcessComplexCalendarFilesTest() Dictionary> attachments = new Dictionary>(); string rootFolder = getSourceFolder(); Stream output = new MemoryStream(); - Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "test-problematic-calendar.ics"), FileMode.Open, FileAccess.Read); + Stream testFile = new FileStream(Path.Combine(getSourceFolder(), "test-with-attachments.ics"), FileMode.Open, FileAccess.Read); CalendarFileProcessor calendarFileProcessor = new CalendarFileProcessor(testFile); calendarFileProcessor.WaitTimeinMilliSeconds = 5000; calendarFileProcessor.FailureAttemptCount = 10; (isProcessed, message, output, attachments) = calendarFileProcessor.ProcessCalendarFiles(); Assert.IsTrue(isProcessed == true, $"Calendar to PDF Conversion failed"); + + SaveStreamAsFile(getSourceFolder(), output, "result_test-with-attachmentsr.pdf"); } private string getSourceFolder() { - return "C:\\Projects\\foi-docreviewer\\MCS.FOI.S3FileConversion\\MCS.FOI.CalendarToPDFUnitTests\\SourceFiles"; + return "C:\\AOT\\FOI\\Source\\foi-docreviewer\\foi-docreviewer\\MCS.FOI.S3FileConversion\\MCS.FOI.CalendarToPDFUnitTests\\SourceFiles"; + } + + public static void SaveStreamAsFile(string filePath, Stream stream, 
string fileName) + { + stream.Position = 0; + var path = Path.Combine(filePath, fileName); + var bytesInStream = new byte[stream.Length]; + + stream.Read(bytesInStream, 0, (int)bytesInStream.Length); + + using (var outputFileStream = new FileStream(path, FileMode.Create)) + { + outputFileStream.Write(bytesInStream, 0, bytesInStream.Length); + } } } } diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/SourceFiles/Backlog refinement.ics b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/SourceFiles/Backlog refinement.ics new file mode 100644 index 000000000..193ce4f28 --- /dev/null +++ b/MCS.FOI.S3FileConversion/MCS.FOI.CalendarToPDFUnitTests/SourceFiles/Backlog refinement.ics @@ -0,0 +1,226 @@ +BEGIN:VCALENDAR +PRODID:-//Microsoft Corporation//Outlook 16.0 MIMEDIR//EN +VERSION:2.0 +METHOD:REQUEST +X-MS-OLK-FORCEINSPECTOROPEN:TRUE +BEGIN:VTIMEZONE +TZID:Pacific Standard Time +BEGIN:STANDARD +DTSTART:16011104T020000 +RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=11 +TZOFFSETFROM:-0700 +TZOFFSETTO:-0800 +END:STANDARD +BEGIN:DAYLIGHT +DTSTART:16010311T020000 +RRULE:FREQ=YEARLY;BYDAY=2SU;BYMONTH=3 +TZOFFSETFROM:-0800 +TZOFFSETTO:-0700 +END:DAYLIGHT +END:VTIMEZONE +BEGIN:VEVENT +ATTENDEE;CN="Abin Antony";ROLE=OPT-PARTICIPANT;RSVP=TRUE:mailto:abin.antony + @aot-technologies.com +ATTENDEE;CN="Antony, Abin CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invalid:n + omail +ATTENDEE;CN=adam.coard@aot-technologies.com;ROLE=OPT-PARTICIPANT;RSVP=TRUE: + mailto:adam.coard@aot-technologies.com +ATTENDEE;CN="Andrews, Arielle CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:inval + id:nomail +ATTENDEE;CN="Balachandran, Vineet CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:i + nvalid:nomail +ATTENDEE;CN="Coard, Adam CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invalid:no + mail +ATTENDEE;CN="Divya Viswanath";ROLE=OPT-PARTICIPANT;RSVP=TRUE:mailto:divya.v + @aot-technologies.com +ATTENDEE;CN="Jacklyn Harrietha";ROLE=OPT-PARTICIPANT;RSVP=TRUE:mailto:jackl + yn.harrietha@aot-technologies.com 
+ATTENDEE;CN="Harrietha, Jacklyn CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:inv + alid:nomail +ATTENDEE;CN="Mullane, Loren CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invalid + :nomail +ATTENDEE;CN="Prodan, Matthew CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invali + d:nomail +ATTENDEE;CN="Qi, Richard CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:invalid:no + mail +ATTENDEE;CN='richard.qi@aot-technologies.com';ROLE=OPT-PARTICIPANT;RSVP=TRU + E:mailto:richard.qi@aot-technologies.com +ATTENDEE;CN=sumathi.thirumani@aot-technologies.com;ROLE=OPT-PARTICIPANT;RSV + P=TRUE:mailto:sumathi.thirumani@aot-technologies.com +ATTENDEE;CN="Thirumani, Sumathi CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:inv + alid:nomail +ATTENDEE;CN='vineet.balachandran@aot-technologies.com';ROLE=OPT-PARTICIPANT + ;RSVP=TRUE:mailto:vineet.balachandran@aot-technologies.com +ATTENDEE;CN="Viswanath, Divya CITZ:EX";ROLE=OPT-PARTICIPANT;RSVP=TRUE:inval + id:nomail +ATTENDEE;CN="Aparna S";ROLE=OPT-PARTICIPANT;RSVP=TRUE:mailto:aparna.s@aot-t + echnologies.com +CLASS:PUBLIC +CREATED:20240918T224519Z +DESCRIPTION:Moving back to 2x1 hr sessions based on recent team patterns + – Can extend/remove meetings as \n\n \n\nI’m extending this invite to + everyone\, however the process we’ve agreed to trial for refinement goin + g forward is: \n\n1. Stories are written and shared with the team minimum + 48 hours in advance\n2. All members of the team review stories in their ow + n time\, add questions or discussion points within the card\n3. 
Team to de + termine who will attend (the technical members of the team in particular h + ave discussed rotating attendance\, and filling each other in during techn + ical standups and sprint planning)\n\n \n\nAs is our practice\, we will ch + eck back if this approach is working after a couple of sprints\n\n________ + ________________________________________________________________________ \ + n\nMicrosoft Teams meeting \n\nJoin on your computer or mobile app \n\nCli + ck here to join the meeting \n\nOr call in (audio + only) \n\n+1 778-401-6289\,\,21501988# + Canada\, Victoria \n\nPhone Conference ID: 215 019 88# \n\nFind a local n + umber | Reset PIN \n\nLearn More | Meeting options < + https://teams.microsoft.com/meetingOptions/?organizerId=adaba6c5-1521-4b0d + -8e1e-11a9bfe1c924&tenantId=6fdb5200-3d0d-4a8a-b036-d3685e359adc&threadId= + 19_meeting_Y2M4ZDFjYmItZDkzOC00MWFmLThhMzYtN2UxN2EwZWZmOGUz@thread.v2&mess + ageId=0&language=en-US> \n\n_____________________________________________ + ___________________________________ \n\n \n\n +DTEND;TZID="Pacific Standard Time":20240917T103000 +DTSTAMP:20211201T212305Z +DTSTART;TZID="Pacific Standard Time":20240917T093000 +LAST-MODIFIED:20240918T224519Z +LOCATION:Microsoft Teams Meeting +ORGANIZER;CN="Pilchar, Molly CITZ:EX":invalid:nomail +PRIORITY:5 +RECURRENCE-ID;TZID="Pacific Standard Time":20240917T093000 +SEQUENCE:210 +SUMMARY;LANGUAGE=en-us:Backlog refinement +TRANSP:OPAQUE +UID:040000008200E00074C5B7101A82E00800000000309A43BC1A3DD701000000000000000 + 01000000099BD2671DADE4B4CAF83DA04954B3577 +X-ALT-DESC;FMTTYPE=text/html:\n

Moving back to 2x1 hr sessions based on recent team patterns + –\; Can extend/remove meetings as

 \;

I’\;m extending this invi + te to everyone\, however the process we’\;ve agreed to trial for refi + nement going forward is:

  1. Stories are written and shared with the team minimum 48 ho + urs in advance
  2. All members of the team review stories i + n their own time\, add questions or discussion points within the card +
  3. Team to determine who will attend (the technical members of + the team in particular have discussed rotating attendance\, and filling e + ach other in during technical standups and sprint planning)

 \;

As i + s our practice\, we will check back if this approach is working after a co + uple of sprints

________________________________________________________________ + ________________

Micr + osoft Teams meeting

______________ + __________________________________________________________________ +

 \;

+X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE +X-MICROSOFT-CDO-IMPORTANCE:1 +X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY +X-MICROSOFT-DISALLOW-COUNTER:FALSE +X-MS-OLK-APPTSEQTIME:20211013T002428Z +BEGIN:VALARM +TRIGGER:-PT15M +ACTION:DISPLAY +DESCRIPTION:Reminder +END:VALARM +END:VEVENT +END:VCALENDAR diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDF/MSGFileProcessor.cs b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDF/MSGFileProcessor.cs index d11b39801..5f4c8e42e 100644 --- a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDF/MSGFileProcessor.cs +++ b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDF/MSGFileProcessor.cs @@ -83,7 +83,7 @@ public MSGFileProcessor(Stream sourceStream) var sentOn = _attachment.SentOn.ToString(); if (!string.IsNullOrEmpty(sentOn)) lastModified = sentOn; - + var attachmentSize = attachmentStream.Length.ToString(); if (string.IsNullOrEmpty(attachmentSize)) attachmentSize = attachmentStream.Capacity.ToString(); @@ -173,7 +173,7 @@ public MSGFileProcessor(Stream sourceStream) var startAt = 0; foreach (var inlineAttachment in inlineAttachments.OrderBy(m => { - int pos = (int) m.GetType().GetProperty("RenderingPosition").GetValue(m, null); + int pos = (int)m.GetType().GetProperty("RenderingPosition").GetValue(m, null); if (pos > -1) { return pos; @@ -186,7 +186,7 @@ public MSGFileProcessor(Stream sourceStream) return match.Index; } })) - { + { if (rtfInline) { if (!inlineAttachment.GetType().FullName.ToLower().Contains("message")) @@ -230,26 +230,26 @@ public MSGFileProcessor(Stream sourceStream) if (width > maxSize && width >= height) { float scale = maxSize / width; - width = (int) (width * scale); - height = (int) (height * scale); + width = (int)(width * scale); + height = (int)(height * scale); } if (height > maxSize) { float scale = maxSize / height; - width = (int) (width * scale); - height = (int) (height * scale); + width = (int)(width * scale); + height = (int)(height * scale); } string widthString = string.Empty; string heightString = string.Empty; if (width > 0) 
{ - widthString = " width =\"" + width +"\""; + widthString = " width =\"" + width + "\""; } if (height > 0) { heightString = " height =\"" + height + "\""; } - string imgReplacementString = ""; + string imgReplacementString = ""; bodyreplaced = regex.Replace(bodyreplaced, imgReplacementString, Int32.MaxValue, startAt); startAt = match.Index + imgReplacementString.Length; } @@ -273,7 +273,7 @@ public MSGFileProcessor(Stream sourceStream) if (!string.IsNullOrEmpty(attachmentsList)) { - htmlString += (@" + htmlString += (@" Attachments: " + attachmentsList.Remove(attachmentsList.Length - 2, 2) + ""); } @@ -283,7 +283,8 @@ public MSGFileProcessor(Stream sourceStream) if (bodyreplaced.Substring(0, 4) == ""); bodyreplaced = bodyreplaced.Insert(bodyStart.Index + bodyStart.Length, htmlString); @@ -537,12 +538,24 @@ private string GenerateHtmlfromMsg(Storage.Message msg) Subject: " + msg.Subject + ""); + DateTime sentDate = Convert.ToDateTime(msg.SentOn); + if(sentDate == DateTime.MinValue) + { + sentDate = Convert.ToDateTime(msg.CreationTime); + } + if (TimeZone.CurrentTimeZone.StandardName != "Pacific Standard Time") + { + + sentDate = TimeZoneInfo.ConvertTimeBySystemTimeZoneId(sentDate, "Pacific Standard Time"); + + } + //Message Sent On timestamp htmlString.Append(@" Sent: - " + msg.SentOn + ""); + " + sentDate + ""); + - //Message body //string message = @"" + msg.BodyText?.Replace("\n", "").Replace("<br>", "")?.Replace("<br/>", ""); diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs index 1e9848bc6..d4de41a4b 100644 --- a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs +++ b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs @@ -50,6 +50,8 @@ public void ProcessSimpleMSGFilesTest() msgFileProcessor.FailureAttemptCount = 10; (converted, message, output, attachments) = msgFileProcessor.ConvertToPDF(); 
Assert.IsTrue(converted == true, $"MSG to PDF Conversion failed for {testFile}"); + + SaveStreamAsFile(getSourceFolder(), output, "result_simple-test-msg-file.pdf"); } [TestMethod] @@ -68,7 +70,7 @@ public void ProcessMSGFileWithAttachmentsTest() (converted, message, output, attachments) = msgFileProcessor.ConvertToPDF(); Assert.IsTrue(converted == true, $"MSG to PDF Conversion failed for {testFile}"); - SaveStreamAsFile(getSourceFolder(), output, "result.pdf"); + SaveStreamAsFile(getSourceFolder(), output, "result_Test-MSG-File-with-Attachments.pdf"); bool isAttachmentsExists = attachments.Count == 3; Assert.IsTrue(isAttachmentsExists, $"MSG PDF file does not exists {testFile}"); diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/Test-MSG-File-with-Attachments.pdf b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/Test-MSG-File-with-Attachments.pdf deleted file mode 100644 index 43ef48bfa..000000000 Binary files a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/Test-MSG-File-with-Attachments.pdf and /dev/null differ diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/simple-test-msg-file.pdf b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/simple-test-msg-file.pdf deleted file mode 100644 index 36d63a55b..000000000 Binary files a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/simple-test-msg-file.pdf and /dev/null differ diff --git a/computingservices/DocumentServices/services/dts/redactionsummary.py b/computingservices/DocumentServices/services/dts/redactionsummary.py index 89fded2ef..539a53e9c 100644 --- a/computingservices/DocumentServices/services/dts/redactionsummary.py +++ b/computingservices/DocumentServices/services/dts/redactionsummary.py @@ -7,6 +7,11 @@ class redactionsummary(): def prepareredactionsummary(self, message, documentids, pageflags, programareas): + def removeduplicateandnr(pageflag): + if pageflag['name'] != 'Duplicate' and 
pageflag['name'] != 'Not Responsive': + return True + return False + pageflags = list(filter(removeduplicateandnr, pageflags)) _ismcfpersonalrequest = True if message.bcgovcode == 'mcf' and message.requesttype == 'personal' else False if _ismcfpersonalrequest and message.category == "responsepackage": redactionsummary = self.__packagesummaryforcfdrequests(message, documentids) diff --git a/computingservices/ZippingServices/services/zipperservice.py b/computingservices/ZippingServices/services/zipperservice.py index 9718c383a..312ad7562 100644 --- a/computingservices/ZippingServices/services/zipperservice.py +++ b/computingservices/ZippingServices/services/zipperservice.py @@ -115,9 +115,15 @@ def __zipfilesandupload(_message, s3credentials): _formattedbytes = None try: - _formattedbytes = __removesensitivecontent(_docbytes) + _formattedbytes = __removesensitivecontent(_docbytes) + if _formattedbytes is not None: + print("_formattedbytes length is {0}".format(len(_formattedbytes))) + else: + print("_formattedbytes is none") except Exception: + print("error happened while removing sensitive content of {0} ".format(filename)) print(traceback.format_exc()) + #added a space to try out code merge on git. 18-Sept-2024 zip.writestr( filename, _docbytes if _formattedbytes is None else _formattedbytes ) @@ -148,17 +154,17 @@ def __removesensitivecontent(documentbytes): # clear metadata reader2 = PyPDF2.PdfReader(BytesIO(documentbytes)) # Check if metadata exists. - if reader2.metadata is not None: - # Create a new PDF file without metadata. - writer = PyPDF2.PdfWriter() - # Copy pages from the original PDF to the new PDF. - for page_num in range(len(reader2.pages)): - page = reader2.pages[page_num] - writer.add_page(page) - #writer.remove_links() # to remove comments. - buffer = BytesIO() - writer.write(buffer) - return buffer.getvalue() + #if reader2.metadata is not None: + # Create a new PDF file without metadata. 
+ writer = PyPDF2.PdfWriter() + # Copy pages from the original PDF to the new PDF. + for page_num in range(len(reader2.pages)): + page = reader2.pages[page_num] + writer.add_page(page) + #writer.remove_links() # to remove comments. + buffer = BytesIO() + writer.write(buffer) + return buffer.getvalue() def __getzipfilepath(foldername, filename): diff --git a/docker-compose.yml b/docker-compose.yml index d3c57fd9a..790944e6a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,6 +18,7 @@ services: - REACT_APP_REDACTION_SELECT_LIMIT=${REDACTION_SELECT_LIMIT} - REACT_APP_BIG_HTTP_GET_TIMEOUT=${BIG_HTTP_GET_TIMEOUT} - REACT_APP_SESSION_SECURITY_KEY=${REACT_APP_SESSION_SECURITY_KEY} + - REACT_APP_REDLINE_OPACITY=${REACT_APP_REDLINE_OPACITY} volumes: - ".:/web" - "/web/node_modules" diff --git a/web/Dockerfile b/web/Dockerfile index 67fad4772..6f3229bd8 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -15,6 +15,7 @@ ARG REACT_APP_ANNOTATION_PAGE_SIZE ARG REACT_APP_PAGE_SELECT_LIMIT ARG REACT_APP_REDACTION_SELECT_LIMIT ARG REACT_APP_SESSION_SECURITY_KEY +ARG REACT_APP_REDLINE_OPACITY ENV NODE_ENV ${NODE_ENV} @@ -27,6 +28,7 @@ ENV REACT_APP_ANNOTATION_PAGE_SIZE ${REACT_APP_ANNOTATION_PAGE_SIZE} ENV REACT_APP_PAGE_SELECT_LIMIT ${REACT_APP_PAGE_SELECT_LIMIT} ENV REACT_APP_REDACTION_SELECT_LIMIT ${REACT_APP_REDACTION_SELECT_LIMIT} ENV REACT_APP_SESSION_SECURITY_KEY ${REACT_APP_SESSION_SECURITY_KEY} +ENV REACT_APP_REDLINE_OPACITY ${REACT_APP_REDLINE_OPACITY} # add `/app/node_modules/.bin` to $PATH ENV PATH /web/node_modules/.bin:$PATH diff --git a/web/Dockerfile.local b/web/Dockerfile.local index 728ec0f76..1747f09fd 100644 --- a/web/Dockerfile.local +++ b/web/Dockerfile.local @@ -17,6 +17,7 @@ ARG REACT_APP_PAGE_SELECT_LIMIT ARG REACT_APP_REDACTION_SELECT_LIMIT ARG REACT_APP_BIG_HTTP_GET_TIMEOUT ARG REACT_APP_SESSION_SECURITY_KEY +ARG REACT_APP_REDLINE_OPACITY ENV NODE_ENV ${NODE_ENV} ENV GENERATE_SOURCEMAP ${GENERATE_SOURCEMAP} @@ -29,6 +30,7 @@ ENV 
REACT_APP_PAGE_SELECT_LIMIT ${REACT_APP_PAGE_SELECT_LIMIT} ENV REACT_APP_REDACTION_SELECT_LIMIT ${REACT_APP_REDACTION_SELECT_LIMIT} ENV BIG_HTTP_GET_TIMEOUT ${REACT_APP_BIG_HTTP_GET_TIMEOUT} ENV REACT_APP_SESSION_SECURITY_KEY ${REACT_APP_SESSION_SECURITY_KEY} +ENV REACT_APP_REDLINE_OPACITY ${REACT_APP_REDLINE_OPACITY} # add `/app/node_modules/.bin` to $PATH ENV PATH /web/node_modules/.bin:$PATH diff --git a/web/src/components/FOI/Home/CreateResponsePDF/useSaveRedlineForSignOff.js b/web/src/components/FOI/Home/CreateResponsePDF/useSaveRedlineForSignOff.js index 682fb718d..e98cd1b03 100644 --- a/web/src/components/FOI/Home/CreateResponsePDF/useSaveRedlineForSignOff.js +++ b/web/src/components/FOI/Home/CreateResponsePDF/useSaveRedlineForSignOff.js @@ -822,7 +822,7 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => { let divCount = 0; const noofdivision = Object.keys(stitchlist).length; let stitchedDocObj = null; - setTotalStitchList(stitchlist) + // setTotalStitchList(stitchlist); //if you want to apply the solution to applyrotations at end of redline process uncomment this for (const [key, value] of Object.entries(stitchlist)) { divCount++; let docCount = 0; @@ -859,7 +859,10 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => { loadAsPDF: true, useDownloader: false, // Added to fix BLANK page issue }).then(async (docObj) => { + + // NOTE: applying rotations to records/documents for redlines is turned off per biz. 
If uncommented, bugs related to redline redactions (s14, NR etc) not being applied and in turn data breaches can occur // applyRotations(docObj, doc.attributes.rotatedpages) + //if (isIgnoredDocument(doc, docObj.getPageCount(), divisionDocuments) == false) { docCountCopy++; docCount++; @@ -1788,7 +1791,12 @@ const stampPageNumberRedline = async ( } for (const rect of rects) { let height = docViewer.getPageHeight(rect.vpageno); - rarr.push(await PDFNet.Redactor.redactionCreate(rect.pageno, (await PDFNet.Rect.init(rect.recto.x1,height-rect.recto.y1,rect.recto.x2,height-rect.recto.y2)), false, '')); + let pageRotation = stitchObject?.getPageRotation(rect.pageno); + let pageWidth = docViewer.getPageWidth(rect.vpageno); + /**Fix for oipc redline displaying s.14 marked page content partially */ + let adjustedRect = await getAdjustedRedactionCoordinates(pageRotation, rect.recto, PDFNet,pageWidth, height); + //rarr.push(await PDFNet.Redactor.redactionCreate(rect.pageno, (await PDFNet.Rect.init(rect.recto.x1,height-rect.recto.y1,rect.recto.x2,height-rect.recto.y2)), false, '')); + rarr.push(await PDFNet.Redactor.redactionCreate(rect.pageno, adjustedRect, false, '')); } if (rarr.length > 0) { const app = {}; @@ -1905,14 +1913,14 @@ } //Consults - Redlines + Redactions (Redact S.NR) Block : End - // Rotate pages - for (const doc of totalStitchList[divisionid]) { - let documentlist = totalStitchList[divisionid]; - let divDocPageMappings = redlinepageMappings["divpagemappings"][divisionid]; - if(documentlist.length > 0) { - applyRotations(stitchObject, doc, divDocPageMappings); - } - } + // Rotate pages - applyrotations after all redline processes (redline applying, stamping, removing pages etc) are completed. This is a solution/option to apply the rotation of pages to redline packages (consults, oipc etc) without losing redactions and causing data breach of data that should be redacted. 
+ // for (const doc of totalStitchList[divisionid]) { + // let documentlist = totalStitchList[divisionid]; + // let divDocPageMappings = redlinepageMappings["divpagemappings"][divisionid]; + // if(documentlist.length > 0) { + // applyRotations(stitchObject, doc, divDocPageMappings); + // } + // } stitchObject .getFileData({ @@ -1972,23 +1980,52 @@ const stampPageNumberRedline = async ( } }; - const applyRotations = (document, doc, divDocPageMappings) => { - const docPageMappings = divDocPageMappings[doc.documentid]; // {origPage: stitchedPage, origPage: stitchedPage} -> {2: 1, 3:2, 4:3} - const rotatedpages = doc.attributes.rotatedpages; // {origPage: rotation. origPage: rotations} -> {4: 180} - const rotatedStitchedPages = {}; - if (rotatedpages) { - for (let [originalPage, stitchedPage] of Object.entries(docPageMappings)) { - let rotation = rotatedpages[originalPage]; - if (rotation) { - rotatedStitchedPages[stitchedPage] = rotation; - } - } - for (let page in rotatedStitchedPages) { - let existingrotation = document.getPageRotation(page); - let rotation = (rotatedStitchedPages[page] - existingrotation + 360) / 90; - document.rotatePages([page], rotation); - } - } + // This is a solution/option to apply the rotation of pages to redline pacakges (consults, oipc etc) without losing redactions and causing data breach of data that should be redacted. + // const applyRotations = (document, doc, divDocPageMappings) => { + // const docPageMappings = divDocPageMappings[doc.documentid]; // {origPage: stitchedPage, origPage: stitchedPage} -> {2: 1, 3:2, 4:3} + // const rotatedpages = doc.attributes.rotatedpages; // {origPage: rotation. 
origPage: rotations} -> {4: 180} + // const rotatedStitchedPages = {}; + // if (rotatedpages) { + // for (let [originalPage, stitchedPage] of Object.entries(docPageMappings)) { + // let rotation = rotatedpages[originalPage]; + // if (rotation) { + // rotatedStitchedPages[stitchedPage] = rotation; + // } + // } + // for (let page in rotatedStitchedPages) { + // let existingrotation = document.getPageRotation(page); + // let rotation = (rotatedStitchedPages[page] - existingrotation + 360) / 90; + // document.rotatePages([page], rotation); + // } + // } + // } + + const getAdjustedRedactionCoordinates = async(pageRotation, recto, PDFNet,pageWidth,pageHeight) => { + let x1 = recto.x1; + let y1 = recto.y1; + let x2 = recto.x2; + let y2 = recto.y2; + // Adjust Y-coordinates to account for the flipped Y-axis in PDF + y1 = pageHeight - y1; + y2 = pageHeight - y2; + // Adjust for page rotation (90, 180, 270 degrees) + switch (pageRotation) { + case 90: + [x1, y1] = [y1, x1]; + [x2, y2] = [y2, x2]; + break; + case 180: + x1 = pageWidth - x1; + y1 = pageHeight - y1; + x2 = pageWidth - x2; + y2 = pageHeight - y2; + break; + case 270: + [x1, y1] = [pageHeight - y1, x1]; + [x2, y2] = [pageHeight - y2, x2]; + break; + } + return await PDFNet.Rect.init(x1, y1, x2, y2); } useEffect(() => { diff --git a/web/src/components/FOI/Home/CreateResponsePDF/useSaveResponsePackage.js b/web/src/components/FOI/Home/CreateResponsePDF/useSaveResponsePackage.js index 76c2613d9..071504cd6 100644 --- a/web/src/components/FOI/Home/CreateResponsePDF/useSaveResponsePackage.js +++ b/web/src/components/FOI/Home/CreateResponsePDF/useSaveResponsePackage.js @@ -267,7 +267,12 @@ const useSaveResponsePackage = () => { /**must apply redactions before removing pages*/ if (pagesToRemove.length > 0) { await doc.removePages(pagesToRemove); - } + } + doc.setWatermark({ + diagonal: { + text: '' + } + }) const { PDFNet } = _instance.Core; PDFNet.initialize(); diff --git a/web/src/components/FOI/Home/Redlining.js 
b/web/src/components/FOI/Home/Redlining.js index a4b784c5c..9d6e8719d 100644 --- a/web/src/components/FOI/Home/Redlining.js +++ b/web/src/components/FOI/Home/Redlining.js @@ -27,6 +27,7 @@ import { ANNOTATION_PAGE_SIZE, REDACTION_SELECT_LIMIT, BIG_HTTP_GET_TIMEOUT, + REDLINE_OPACITY, } from "../../../constants/constants"; import { errorToast } from "../../../helper/helper"; import { useAppSelector } from "../../../hooks/hook"; @@ -89,7 +90,7 @@ const Redlining = React.forwardRef( }, ref ) => { - const alpha = 0.6; + const alpha = REDLINE_OPACITY; const requestnumber = useAppSelector( (state) => state.documents?.requestnumber @@ -212,6 +213,7 @@ const Redlining = React.forwardRef( } = instance.Core; instance.UI.disableElements(PDFVIEWER_DISABLED_FEATURES.split(",")); instance.UI.enableElements(["attachmentPanelButton"]); + instance.UI.enableNoteSubmissionWithEnter(); documentViewer.setToolMode( documentViewer.getTool(instance.Core.Tools.ToolNames.REDACTION) ); @@ -1363,6 +1365,42 @@ const Redlining = React.forwardRef( if (docInstance && documentList.length > 0) { const document = docInstance?.UI.iframeWindow.document; document.getElementById("create_response_pdf").addEventListener("click", handleCreateResponsePDFClick); + docViewer?.setWatermark({ + // Draw custom watermark in middle of the document + custom: (ctx, pageNumber, pageWidth, pageHeight) => { + // ctx is an instance of CanvasRenderingContext2D + // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D + // Hence being able to leverage those properties + let originalPage = pageMappedDocs['stitchedPageLookup'][pageNumber] + let doc = pageFlags.find(d => d.documentid === originalPage.docid); + let pageFlag = doc?.pageflag?.find(f => f.page === originalPage.page); + if (pageFlag?.flagid === pageFlagTypes["Duplicate"]) { + ctx.fillStyle = "#ff0000"; + ctx.font = "20pt Arial"; + ctx.globalAlpha = 0.4; + + ctx.save(); + ctx.translate(pageWidth / 2, pageHeight / 2); + ctx.rotate(-Math.PI / 
4); + ctx.fillText("DUPLICATE", 0, 0); + ctx.restore(); + } + + if (pageFlag?.flagid === pageFlagTypes["Not Responsive"]) { + ctx.fillStyle = "#ff0000"; + ctx.font = "20pt Arial"; + ctx.globalAlpha = 0.4; + + ctx.save(); + ctx.translate(pageWidth / 2, pageHeight / 2); + ctx.rotate(-Math.PI / 4); + ctx.fillText("NOT RESPONSIVE", 0, 0); + ctx.restore(); + } + }, + }); + docViewer?.refreshAll(); + docViewer?.updateView(); } //Cleanup Function: removes previous event listeiner to ensure handleCreateResponsePDFClick event is not called multiple times on click return () => { diff --git a/web/src/constants/constants.ts b/web/src/constants/constants.ts index 0152fc2f3..1a76564a0 100644 --- a/web/src/constants/constants.ts +++ b/web/src/constants/constants.ts @@ -19,3 +19,4 @@ export const ANNOTATION_PAGE_SIZE = window._env_?.REACT_APP_ANNOTATION_PAGE_SIZE export const PAGE_SELECT_LIMIT = window._env_?.REACT_APP_PAGE_SELECT_LIMIT ?? process.env.REACT_APP_PAGE_SELECT_LIMIT ?? 250; export const REDACTION_SELECT_LIMIT = window._env_?.REACT_APP_REDACTION_SELECT_LIMIT ?? process.env.REACT_APP_REDACTION_SELECT_LIMIT ?? 250; export const BIG_HTTP_GET_TIMEOUT = window._env_?.REACT_APP_BIG_HTTP_GET_TIMEOUT ?? process.env.REACT_APP_BIG_HTTP_GET_TIMEOUT ?? 300000; +export const REDLINE_OPACITY = window._env_?.REACT_APP_REDLINE_OPACITY ?? process.env.REACT_APP_REDLINE_OPACITY ?? 0.5;