");
bodyreplaced = bodyreplaced.Insert(bodyStart.Index + bodyStart.Length, htmlString);
@@ -505,12 +538,24 @@ private string GenerateHtmlfromMsg(Storage.Message msg)
Subject:
" + msg.Subject + " ");
+ DateTime sentDate = Convert.ToDateTime(msg.SentOn);
+ if(sentDate == DateTime.MinValue)
+ {
+ sentDate = Convert.ToDateTime(msg.CreationTime);
+ }
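+ // Normalize the sent timestamp to Pacific Standard Time when the conversion host is not running in PST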
+ if (TimeZone.CurrentTimeZone.StandardName != "Pacific Standard Time")
+ {
+
+ sentDate = TimeZoneInfo.ConvertTimeBySystemTimeZoneId(sentDate, "Pacific Standard Time");
+
+ }
+
//Message Sent On timestamp
htmlString.Append(@"
Sent:
- " + msg.SentOn + " ");
+ " + sentDate + " ");
//Message body
//string message = @"" + msg.BodyText?.Replace("\n", "
").Replace("<br>", "
")?.Replace("<br/>", "
");
diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs
index 1e9848bc6..d4de41a4b 100644
--- a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs
+++ b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/MSGFileProcessorTest.cs
@@ -50,6 +50,8 @@ public void ProcessSimpleMSGFilesTest()
msgFileProcessor.FailureAttemptCount = 10;
(converted, message, output, attachments) = msgFileProcessor.ConvertToPDF();
Assert.IsTrue(converted == true, $"MSG to PDF Conversion failed for {testFile}");
+
+ SaveStreamAsFile(getSourceFolder(), output, "result_simple-test-msg-file.pdf");
}
[TestMethod]
@@ -68,7 +70,7 @@ public void ProcessMSGFileWithAttachmentsTest()
(converted, message, output, attachments) = msgFileProcessor.ConvertToPDF();
Assert.IsTrue(converted == true, $"MSG to PDF Conversion failed for {testFile}");
- SaveStreamAsFile(getSourceFolder(), output, "result.pdf");
+ SaveStreamAsFile(getSourceFolder(), output, "result_Test-MSG-File-with-Attachments.pdf");
bool isAttachmentsExists = attachments.Count == 3;
Assert.IsTrue(isAttachmentsExists, $"MSG PDF file does not exists {testFile}");
diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/Test-MSG-File-with-Attachments.pdf b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/Test-MSG-File-with-Attachments.pdf
deleted file mode 100644
index 43ef48bfa..000000000
Binary files a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/Test-MSG-File-with-Attachments.pdf and /dev/null differ
diff --git a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/simple-test-msg-file.pdf b/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/simple-test-msg-file.pdf
deleted file mode 100644
index 36d63a55b..000000000
Binary files a/MCS.FOI.S3FileConversion/MCS.FOI.MSGToPDFUnitTests/SourceFiles/simple-test-msg-file.pdf and /dev/null differ
diff --git a/api/migrations/versions/9d45ce57481e_.py b/api/migrations/versions/9d45ce57481e_.py
new file mode 100644
index 000000000..03e94fea5
--- /dev/null
+++ b/api/migrations/versions/9d45ce57481e_.py
@@ -0,0 +1,35 @@
+"""empty message
+
+Revision ID: 9d45ce57481e
+Revises: 18a45d1b33cc
+Create Date: 2024-06-06 10:19:45.739225
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '9d45ce57481e'
+down_revision = '18a45d1b33cc'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.create_table('PDFStitchJobAttributes',
+ sa.Column('attributesid', sa.Integer(), autoincrement=True, nullable=False),
+ sa.Column('pdfstitchjobid', sa.Integer(), nullable=False),
+ sa.Column('version', sa.Integer(), nullable=False),
+ sa.Column('ministryrequestid', sa.Integer(), nullable=False),
+ sa.Column('attributes', postgresql.JSON(astext_type=sa.Text()), nullable=False),
+ sa.Column('createdat', sa.TIMESTAMP, nullable=False, server_default=sa.func.now()),
+ sa.Column('createdby', sa.String(length=120), nullable=True),
+ sa.PrimaryKeyConstraint('attributesid'),
+ sa.ForeignKeyConstraint(['pdfstitchjobid', 'version'], ['PDFStitchJob.pdfstitchjobid', 'PDFStitchJob.version'], )
+ )
+
+
+def downgrade():
+ op.drop_table('PDFStitchJobAttributes')
+
diff --git a/api/reviewer_api/models/DeduplicationJob.py b/api/reviewer_api/models/DeduplicationJob.py
index 81699e690..0258ed6c0 100644
--- a/api/reviewer_api/models/DeduplicationJob.py
+++ b/api/reviewer_api/models/DeduplicationJob.py
@@ -64,12 +64,12 @@ def getdedupestatus(cls, ministryrequestid):
executions = []
try:
sql = """select distinct on (deduplicationjobid) deduplicationjobid, version,
- filename, status, documentmasterid, trigger
+ filename, status, documentmasterid, trigger, message
from "DeduplicationJob" fcj where ministryrequestid = :ministryrequestid
order by deduplicationjobid, "version" desc"""
rs = db.session.execute(text(sql), {'ministryrequestid': ministryrequestid})
for row in rs:
- executions.append({"deduplicationjobid": row["deduplicationjobid"], "version": row["version"], "filename": row["filename"], "status": row["status"], "documentmasterid": row["documentmasterid"], "trigger":row["trigger"]})
+ executions.append({"deduplicationjobid": row["deduplicationjobid"], "version": row["version"], "filename": row["filename"], "status": row["status"], "documentmasterid": row["documentmasterid"], "trigger":row["trigger"], "message": row["message"]})
except Exception as ex:
logging.error(ex)
db.session.close()
diff --git a/api/reviewer_api/models/PDFStitchJobAttributes.py b/api/reviewer_api/models/PDFStitchJobAttributes.py
new file mode 100644
index 000000000..899a7199a
--- /dev/null
+++ b/api/reviewer_api/models/PDFStitchJobAttributes.py
@@ -0,0 +1,66 @@
+from .db import db, ma
+from datetime import datetime as datetime2
+from sqlalchemy.dialects.postgresql import JSON
+from sqlalchemy import func, and_
+from .default_method_result import DefaultMethodResult
+from .DocumentDeleted import DocumentDeleted
+from .DocumentMaster import DocumentMaster
+import logging
+
+
+class PDFStitchJobAttributes(db.Model):
+ __tablename__ = "PDFStitchJobAttributes"
+ # Defining the columns
+ attributesid = db.Column(db.Integer, primary_key=True, autoincrement=True)
+ pdfstitchjobid = db.Column(db.Integer, db.ForeignKey("PDFStitchJob.pdfstitchjobid"))
+ version = db.Column(db.Integer, db.ForeignKey("PDFStitchJob.version"))
+ ministryrequestid = db.Column(db.Integer, nullable=False)
+ attributes = db.Column(JSON, unique=False, nullable=False)
+ createdat = db.Column(db.DateTime, default=datetime2.now, nullable=False)
+ createdby = db.Column(db.String(120), nullable=False)
+
+
+ @classmethod
+ def insert(cls, row):
+ try:
+ db.session.add(row)
+ db.session.commit()
+ return DefaultMethodResult(
+ True,
+ "PDF Stitch Job Attributes recorded for ministryrequestid: {0}".format(
+ row.ministryrequestid
+ ),
+ row.pdfstitchjobid,
+ )
+ except Exception as ex:
+ logging.error(ex)
+ finally:
+ db.session.close()
+
+ @classmethod
+ def getpdfstitchjobattributesbyid(cls, requestid):
+ try:
+ pdfstitchjobattributesschema = PDFStitchJobAttributesSchema(many=False)
+ query = db.session.query(PDFStitchJobAttributes).filter(
+ PDFStitchJobAttributes.ministryrequestid == requestid
+ ).first()
+ return pdfstitchjobattributesschema.dump(query)
+ except Exception as ex:
+ logging.error(ex)
+ finally:
+ db.session.close()
+
+
+
+
+class PDFStitchJobAttributesSchema(ma.Schema):
+ class Meta:
+ fields = (
+ "attributesid",
+ "pdfstitchjobid",
+ "version",
+ "ministryrequestid",
+ "attributes",
+ "createdat",
+ "createdby",
+ )
diff --git a/api/reviewer_api/resources/document.py b/api/reviewer_api/resources/document.py
index 2cd9b7d8c..9aba5559f 100644
--- a/api/reviewer_api/resources/document.py
+++ b/api/reviewer_api/resources/document.py
@@ -29,6 +29,7 @@
from reviewer_api.services.documentservice import documentservice
from reviewer_api.services.docdeletedpageservice import docdeletedpageservice
+from reviewer_api.services.jobrecordservice import jobrecordservice
API = Namespace('Document Services', description='Endpoints for deleting and replacing documents')
TRACER = Tracer.get_instance()
@@ -122,10 +123,17 @@ def get(requestid):
response.raise_for_status()
# get request status
jsonobj = response.json()
+ balancefeeoverrodforrequest = jobrecordservice().isbalancefeeoverrodforrequest(requestid)
+ outstandingbalance=0
+ if 'cfrfee' in jsonobj and 'feedata' in jsonobj['cfrfee'] and "balanceDue" in jsonobj['cfrfee']['feedata']:
+ outstandingbalancestr = jsonobj['cfrfee']['feedata']["balanceDue"]
+ outstandingbalance = float(outstandingbalancestr)
requestinfo = {
"bcgovcode": jsonobj["bcgovcode"],
"requesttype": jsonobj["requestType"],
"validoipcreviewlayer": documentservice().validate_oipcreviewlayer(jsonobj, requestid),
+ "outstandingbalance": outstandingbalance,
+ "balancefeeoverrodforrequest": balancefeeoverrodforrequest
}
documentdivisionslist,result = documentservice().getdocuments(requestid, requestinfo["bcgovcode"])
return json.dumps({"requeststatuslabel": jsonobj["requeststatuslabel"], "documents": result, "requestnumber":jsonobj["axisRequestId"], "requestinfo":requestinfo, "documentdivisions":documentdivisionslist}), 200
diff --git a/api/reviewer_api/resources/foiflowmasterdata.py b/api/reviewer_api/resources/foiflowmasterdata.py
index c42af161b..1d50eb851 100644
--- a/api/reviewer_api/resources/foiflowmasterdata.py
+++ b/api/reviewer_api/resources/foiflowmasterdata.py
@@ -172,7 +172,8 @@ class FOIFlowS3PresignedRedline(Resource):
def post(ministryrequestid, redactionlayer="redline", layertype="redline"):
try:
data = request.get_json()
- requesttype = data["divdocumentList"]
+ # print("data!:",data)
+ requesttype = data["requestType"]
documentmapper = redactionservice().getdocumentmapper(
data["divdocumentList"][0]["documentlist"][0]["filepath"].split("/")[3]
)
@@ -199,17 +200,27 @@ def post(ministryrequestid, redactionlayer="redline", layertype="redline"):
packagetype = "redline"
if redactionlayer == "oipc":
packagetype = "oipcreview" if layertype == "oipcreview" else "oipcredline"
-
+ if layertype == "consult":
+ packagetype = "consult"
+
+ # check whether this is a single redline package
+ is_single_redline = is_single_redline_package(_bcgovcode, packagetype, requesttype)
+ # print("is_single_redline:",is_single_redline)
+ #print("divdocumentList:",data["divdocumentList"])
for div in data["divdocumentList"]:
if len(div["documentlist"]) > 0:
+ # print("filepathlist:" , div["documentlist"][0]["filepath"])
filepathlist = div["documentlist"][0]["filepath"].split("/")[4:]
- if is_single_redline_package(_bcgovcode, packagetype, requesttype) == False:
+ if is_single_redline == False:
division_name = div["divisionname"]
# generate save url for stitched file
filepath_put = "{0}/{2}/{1}/{0} - {2} - {1}.pdf".format(
filepathlist[0], division_name, packagetype
)
-
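+ # consult packages go into a "Consult" folder and are named "Consult - <division> - <request number>.pdf"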
+ if packagetype == "consult":
+ filepath_put = "{0}/{2}/{2} - {1} - {0}.pdf".format(
+ filepathlist[0], division_name, 'Consult'
+ )
s3path_save = s3client.generate_presigned_url(
ClientMethod="get_object",
Params={
@@ -261,14 +272,17 @@ def post(ministryrequestid, redactionlayer="redline", layertype="redline"):
)
elif len(div["incompatableList"]) > 0:
filepathlist = div["incompatableList"][0]["filepath"].split("/")[4:]
- if is_single_redline_package(_bcgovcode, packagetype, requesttype) and singlepkgpath is None :
+ if is_single_redline and singlepkgpath is None :
if len(div["documentlist"]) > 0 or len(div["incompatableList"]) > 0:
div = data["divdocumentList"][0]
filepathlist = div["documentlist"][0]["filepath"].split("/")[4:]
+ # print("filepathlist:",filepathlist)
filename = filepathlist[0]
+ # print("filename1:",filename)
filepath_put = "{0}/{2}/{1}-Redline.pdf".format(
filepathlist[0],filename, packagetype
)
+ # print("filepath_put:",filepath_put)
s3path_save = s3client.generate_presigned_url(
ClientMethod="get_object",
Params={
@@ -279,10 +293,11 @@ def post(ministryrequestid, redactionlayer="redline", layertype="redline"):
ExpiresIn=3600,
HttpMethod="PUT",
)
+ # print("s3path_save:",s3path_save)
singlepkgpath = s3path_save
data["s3path_save"] = s3path_save
- if is_single_redline_package(_bcgovcode, packagetype, requesttype):
+ if is_single_redline:
for div in data["divdocumentList"]:
if len(div["documentlist"]) > 0:
documentlist_copy = div["documentlist"][:]
@@ -301,7 +316,7 @@ def post(ministryrequestid, redactionlayer="redline", layertype="redline"):
data["requestnumber"] = filepathlist[0]
data["bcgovcode"] = _bcgovcode
- data["issingleredlinepackage"] = "Y" if is_single_redline_package(_bcgovcode, packagetype, requesttype) else "N"
+ data["issingleredlinepackage"] = "Y" if is_single_redline else "N"
return json.dumps(data), 200
except BusinessException as exception:
return {"status": exception.status_code, "message": exception.message}, 500
diff --git a/api/reviewer_api/resources/redaction.py b/api/reviewer_api/resources/redaction.py
index 4b633b1f3..0e1f20195 100644
--- a/api/reviewer_api/resources/redaction.py
+++ b/api/reviewer_api/resources/redaction.py
@@ -231,7 +231,7 @@ class AnnotationMetadata(Resource):
@staticmethod
@TRACER.trace()
@cross_origin(origins=allowedorigins())
- @auth.require
+ #@auth.require
def get(ministryrequestid, redactionlayer):
try:
result = redactionservice().getannotationinfobyrequest(ministryrequestid, redactionlayer)
@@ -342,7 +342,7 @@ def post():
try:
requestjson = request.get_json()
print("\nrequestjson:",requestjson)
- if(requestjson['bcgovcode'] == "mcf"):
+ if(requestjson['bcgovcode'] == "mcf" and requestjson['requesttype'] == "personal"):
finalpackageschema = MCFFinalPackageSchema().load(requestjson)
else:
finalpackageschema = FinalPackageSchema().load(requestjson)
diff --git a/api/reviewer_api/schemas/finalpackage.py b/api/reviewer_api/schemas/finalpackage.py
index 94f5f0b4b..bfb334589 100644
--- a/api/reviewer_api/schemas/finalpackage.py
+++ b/api/reviewer_api/schemas/finalpackage.py
@@ -10,6 +10,8 @@ class FileSchema(Schema):
class AttributeSchema(Schema):
files = fields.Nested(FileSchema, many=True, required=True, allow_none=False)
+class FeeOverrideSchema(Schema):
+ feeoverridereason = fields.Str(data_key="feeoverridereason", allow_none=True)
class SummaryPkgSchema(Schema):
divisionid = fields.Int(data_key="divisionid", allow_none=True)
@@ -30,6 +32,9 @@ class FinalPackageSchema(Schema):
)
summarydocuments = fields.Nested(SummarySchema, allow_none=True)
redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False)
+ pdfstitchjobattributes = fields.Nested(FeeOverrideSchema, allow_none=True, many=False)
+ requesttype = fields.Str(data_key="requesttype", allow_none=False)
class SummaryRecordSchema(Schema):
recordname = fields.Str(data_key="recordname", allow_none=True)
@@ -53,4 +58,6 @@ class MCFFinalPackageSchema(Schema):
AttributeSchema, many=True, required=True, allow_none=False
)
summarydocuments = fields.Nested(MCFSummarySchema, allow_none=True)
- redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False)
\ No newline at end of file
+ redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False)
+ pdfstitchjobattributes = fields.Nested(FeeOverrideSchema, allow_none=True, many=False)
+ requesttype = fields.Str(data_key="requesttype", allow_none=False)
diff --git a/api/reviewer_api/schemas/redline.py b/api/reviewer_api/schemas/redline.py
index 27db2aa60..bdd484b3d 100644
--- a/api/reviewer_api/schemas/redline.py
+++ b/api/reviewer_api/schemas/redline.py
@@ -29,4 +29,5 @@ class RedlineSchema(Schema):
AttributeSchema, many=True, required=True, allow_none=False
)
summarydocuments = fields.Nested(SummarySchema, allow_none=True)
- redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False)
\ No newline at end of file
+ redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False)
+ requesttype = fields.Str(data_key="requesttype", allow_none=False)
\ No newline at end of file
diff --git a/api/reviewer_api/services/documentservice.py b/api/reviewer_api/services/documentservice.py
index 33170c4ba..c11dd6db9 100644
--- a/api/reviewer_api/services/documentservice.py
+++ b/api/reviewer_api/services/documentservice.py
@@ -251,6 +251,7 @@ def __updatededupestatus(self, dedupes, record):
record["deduplicationstatus"] = dedupe["status"]
record["filename"] = dedupe["filename"]
record["trigger"] = dedupe["trigger"]
+ record["message"] = dedupe["message"]
return record
def __updateproperties_old(self, properties, records, record):
@@ -417,7 +418,6 @@ def updatedocumentattributes(self, payload, userid):
if 'rotatedpages' not in newdocattributes:
newdocattributes['rotatedpages'] = {}
newdocattributes['rotatedpages'].update(payload["rotatedpages"])
- newdocattributes["divisions"] = payload["divisions"]
newRows.append(
DocumentAttributes(
version=docattributes["version"] + 1,
diff --git a/api/reviewer_api/services/jobrecordservice.py b/api/reviewer_api/services/jobrecordservice.py
index 587c94bc4..09a74039e 100644
--- a/api/reviewer_api/services/jobrecordservice.py
+++ b/api/reviewer_api/services/jobrecordservice.py
@@ -6,6 +6,7 @@
from reviewer_api.models.DocumentAttributes import DocumentAttributes
from reviewer_api.services.annotationservice import annotationservice
from reviewer_api.services.documentpageflagservice import documentpageflagservice
+from reviewer_api.models.PDFStitchJobAttributes import PDFStitchJobAttributes
from reviewer_api.auth import auth, AuthHelper
from datetime import datetime as datetime2
from reviewer_api.utils.constants import FILE_CONVERSION_FILE_TYPES, DEDUPE_FILE_TYPES
@@ -129,3 +130,19 @@ def insertpagecalculatorjobstatus(self, message, userid):
)
job = PageCalculatorJob.insert(row)
return job
+
+ def insertfeeoverridereason(self, message, pdfstitchjobid, userid):
+ row = PDFStitchJobAttributes(
+ pdfstitchjobid=pdfstitchjobid,
+ version=1,
+ ministryrequestid=message['ministryrequestid'],
+ attributes=message['pdfstitchjobattributes'],
+ createdby=userid
+ )
+ job = PDFStitchJobAttributes.insert(row)
+ return job
+
+ def isbalancefeeoverrodforrequest(self, requestid):
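+ # a PDFStitchJobAttributes record exists only when the outstanding-balance warning was overridden for the request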
+ pdfstitchjobattributes = PDFStitchJobAttributes().getpdfstitchjobattributesbyid(requestid)
+ isbalancefeeoverrode = True if pdfstitchjobattributes else False
+ return isbalancefeeoverrode
diff --git a/api/reviewer_api/services/radactionservice.py b/api/reviewer_api/services/radactionservice.py
index f12b53969..cb064ad48 100644
--- a/api/reviewer_api/services/radactionservice.py
+++ b/api/reviewer_api/services/radactionservice.py
@@ -124,6 +124,11 @@ def triggerdownloadredlinefinalpackage(self, finalpackageschema, userinfo):
_jobmessage, userinfo["userid"]
)
if job.success:
+ if 'pdfstitchjobattributes' in finalpackageschema and finalpackageschema['pdfstitchjobattributes'] is not None:
+ if 'feeoverridereason' in finalpackageschema['pdfstitchjobattributes']:
+ feeoverridereason= finalpackageschema['pdfstitchjobattributes']['feeoverridereason']
+ if feeoverridereason is not None and feeoverridereason != '':
+ jobrecordservice().insertfeeoverridereason(finalpackageschema,job.identifier,userinfo["userid"])
_message = self.__preparemessageforsummaryservice(
finalpackageschema, userinfo, job
)
@@ -131,6 +136,14 @@ def triggerdownloadredlinefinalpackage(self, finalpackageschema, userinfo):
# redline/final package download: prepare message for zipping service
def __preparemessageforsummaryservice(self, messageschema, userinfo, job):
+ feeoverridereason= ''
+ pdf_stitch_job_attributes = None
+ if 'pdfstitchjobattributes' in messageschema:
+ pdf_stitch_job_attributes = to_json(messageschema['pdfstitchjobattributes'])
+ if pdf_stitch_job_attributes is not None:
+ feeoverridereason= json.loads(pdf_stitch_job_attributes).get("feeoverridereason", None)
+ if feeoverridereason is not None and feeoverridereason != '':
+ feeoverridereason= userinfo["firstname"]+" "+userinfo["lastname"]+" overrode balance outstanding warning for the following reason: "+feeoverridereason
_message = {
"jobid": job.identifier,
"requestid": -1,
@@ -145,7 +158,10 @@ def __preparemessageforsummaryservice(self, messageschema, userinfo, job):
"finaloutput": to_json(""),
"attributes": to_json(messageschema["attributes"]),
"summarydocuments": json.dumps(messageschema["summarydocuments"]),
- "redactionlayerid": json.dumps(messageschema["redactionlayerid"])
+ "redactionlayerid": json.dumps(messageschema["redactionlayerid"]),
+ "feeoverridereason": feeoverridereason,
+ "requesttype": messageschema["requesttype"],
}
return _message
diff --git a/api/reviewer_api/utils/enums.py b/api/reviewer_api/utils/enums.py
index fec94ac70..00f5682b1 100644
--- a/api/reviewer_api/utils/enums.py
+++ b/api/reviewer_api/utils/enums.py
@@ -35,6 +35,7 @@ class MinistryTeamWithKeycloackGroup(Enum):
AGR = "AGR Ministry Team"
AG = "AG Ministry Team"
BRD = "BRD Ministry Team"
+ CAF = "CAF Ministry Team"
CAS = "CAS Ministry Team"
CITZ = "CITZ Ministry Team"
CLB = "CLB Ministry Team"
@@ -73,7 +74,8 @@ class MinistryTeamWithKeycloackGroup(Enum):
ECC = "ECC Ministry Team"
JED = "JED Ministry Team"
COR = "COR Ministry Team"
HSG = "HSG Ministry Team"
+ LSB = "LSB Ministry Team"
@staticmethod
def list():
diff --git a/api/reviewer_api/utils/util.py b/api/reviewer_api/utils/util.py
index 68414ac3d..796561c9f 100644
--- a/api/reviewer_api/utils/util.py
+++ b/api/reviewer_api/utils/util.py
@@ -137,6 +137,8 @@ def getbatchconfig():
return _begin, _size, _limit
def is_single_redline_package(bcgovcode, packagetype, requesttype):
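+ # consult packages are always produced per division, so they never use the single redline package path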
+ if packagetype == "consult":
+ return False
if (packagetype == "oipcreview"):
return True
if REDLINE_SINGLE_PKG_MINISTRIES not in (None, ""):
@@ -145,6 +147,6 @@ def is_single_redline_package(bcgovcode, packagetype, requesttype):
return True
if REDLINE_SINGLE_PKG_MINISTRIES_PERSONAL not in (None, ""):
_pkg_ministries_personal = REDLINE_SINGLE_PKG_MINISTRIES_PERSONAL.replace(" ", "").split(',')
- if bcgovcode.upper() in _pkg_ministries_personal:
+ if bcgovcode.upper() in _pkg_ministries_personal and requesttype.upper() == "PERSONAL":
return True
return False
\ No newline at end of file
diff --git a/computingservices/DedupeServices/requirements.txt b/computingservices/DedupeServices/requirements.txt
index ca7e1b33c..e51e508cd 100644
Binary files a/computingservices/DedupeServices/requirements.txt and b/computingservices/DedupeServices/requirements.txt differ
diff --git a/computingservices/DedupeServices/services/dedupeservice.py b/computingservices/DedupeServices/services/dedupeservice.py
index 0799beea4..a4af991b6 100644
--- a/computingservices/DedupeServices/services/dedupeservice.py
+++ b/computingservices/DedupeServices/services/dedupeservice.py
@@ -21,4 +21,4 @@ def processmessage(message):
documentspagecalculatorproducerservice().producepagecalculatorevent(pagecalculatormessage, _pagecount, pagecalculatorjobid)
except(Exception) as error:
print("Exception while processing redis message, func processmessage(p3), Error : {0} ".format(error))
- recordjobend(message, True, traceback.format_exc())
\ No newline at end of file
+ recordjobend(message, True, error.args[0])
\ No newline at end of file
diff --git a/computingservices/DedupeServices/services/s3documentservice.py b/computingservices/DedupeServices/services/s3documentservice.py
index 6f4d46047..2129117a4 100644
--- a/computingservices/DedupeServices/services/s3documentservice.py
+++ b/computingservices/DedupeServices/services/s3documentservice.py
@@ -12,7 +12,11 @@
from html import escape
import hashlib
import uuid
+import boto3
+from botocore.config import Config
from re import sub
+import fitz
+import PyPDF2
from utils import (
gets3credentialsobject,
getdedupeproducermessage,
@@ -49,6 +53,49 @@ def __getcredentialsbybcgovcode(bcgovcode):
return s3cred
+def _prepareattachment(producermessage, data, s3uripath, file_name):
+ attachment = {
+ "filename": escape(sub("<[0-9]+>", "", file_name, 1)),
+ "s3uripath": s3uripath,
+ "attributes": deepcopy(producermessage.attributes),
+ }
+ attachment["attributes"]["filesize"] = len(data)
+ attachment["attributes"][
+ "parentpdfmasterid"
+ ] = producermessage.documentmasterid
+ attachment["attributes"].pop("batch")
+ attachment["attributes"].pop("extension")
+ attachment["attributes"].pop("incompatible")
+ return attachment
+
+def _generate_file_attachments(producermessage, reader, auth):
+ file_attachments = []
+ for page in reader.pages:
+ if "/Annots" in page:
+ annotations = page["/Annots"]
+ for annotation in annotations:
+ subtype = annotation.get_object()["/Subtype"]
+ if subtype == "/FileAttachment":
+ # Placeholder logic for PDFs with attachments/embedded files. Once resources are available to revise this feature and extract attachments and embedded files into one new parent PDF, this error handling will be removed.
+ raise Exception("PDF contains attachments and/or embedded files. File must be manually fixed and replaced")
+
+ # Old logic to extract embedded files. Uncomment when work starts on the new feature to save PDF embedded files and attachments as one file.
+ # producermessage.attributes["hasattachment"] = True
+ # fileobj = annotation.get_object()["/FS"]
+ # file = fileobj["/F"]
+ # data = fileobj["/EF"]["/F"].get_data()
+ # # data = BytesIO(data).getvalue()
+ # s3uripath = (
+ # path.splitext(producermessage.s3filepath)[0]
+ # + "/"
+ # + "{0}{1}".format(uuid.uuid4(), path.splitext(file)[1])
+ # )
+ # uploadresponse = requests.put(s3uripath, data=data, auth=auth)
+ # uploadresponse.raise_for_status()
+ # attachment = _prepareattachment(producermessage, data, s3uripath, file)
+ # file_attachments.append(attachment)
+ return file_attachments
+
def gets3documenthashcode(producermessage):
s3credentials = __getcredentialsbybcgovcode(producermessage.bcgovcode)
s3_access_key_id = s3credentials.s3accesskey
@@ -85,7 +132,11 @@ def gets3documenthashcode(producermessage):
if "/Collection" in reader.trailer["/Root"]:
producermessage.attributes["isportfolio"] = True
else:
- producermessage.attributes["hasattachment"] = True
+ # Placeholder logic for PDFs with attachments/embedded files. Once resources are available to revise this feature and extract attachments and embedded files into one new parent PDF, this error handling will be removed.
+ raise Exception("PDF contains attachments and/or embedded files. File must be manually fixed and replaced")
+
+ # Old logic to extract attached files. Uncomment when work starts on the new feature to save PDF embedded files and attachments as one file.
+ # producermessage.attributes["hasattachment"] = True
for name in reader.attachments:
s3uripath = (
path.splitext(filepath)[0]
@@ -95,18 +146,7 @@ def gets3documenthashcode(producermessage):
data = b"".join(reader.attachments[name])
uploadresponse = requests.put(s3uripath, data=data, auth=auth)
uploadresponse.raise_for_status()
- attachment = {
- "filename": escape(sub("<[0-9]+>", "", name, 1)),
- "s3uripath": s3uripath,
- "attributes": deepcopy(producermessage.attributes),
- }
- attachment["attributes"]["filesize"] = len(data)
- attachment["attributes"][
- "parentpdfmasterid"
- ] = producermessage.documentmasterid
- attachment["attributes"].pop("batch")
- attachment["attributes"].pop("extension")
- attachment["attributes"].pop("incompatible")
+ attachment = _prepareattachment(producermessage, data, s3uripath, name)
attachments.append(attachment)
saveresponse = requests.post(
request_management_api
@@ -119,6 +159,57 @@ def gets3documenthashcode(producermessage):
},
)
saveresponse.raise_for_status()
+
+ # New logic to extract embedded file attachments (stored as annotations in the PDF) from the PDF pages
+ # Before looping over the PDF pages, confirm that annotations exist in the PDF using the PyMuPDF library (fitz)
+ fitz_reader = fitz.open(stream=BytesIO(response.content), filetype="pdf")
+ if (fitz_reader.has_annots()):
+ file_attachments = _generate_file_attachments(producermessage, reader, auth)
+ if (len(file_attachments) > 0):
+ saveresponse = requests.post(
+ request_management_api
+ + "/api/foirecord/-1/ministryrequest/"
+ + producermessage.ministryrequestid,
+ data=json.dumps({"records": file_attachments}),
+ headers={
+ "Authorization": producermessage.usertoken,
+ "Content-Type": "application/json",
+ }
+ )
+ saveresponse.raise_for_status()
+ fitz_reader.close()
+
+ # clear metadata
+ reader2 = PyPDF2.PdfReader(BytesIO(response.content))
+ # Check if metadata exists.
+ if reader2.metadata is not None:
+ # Create a new PDF file without metadata.
+ writer = PyPDF2.PdfWriter()
+ # Copy pages from the original PDF to the new PDF.
+ for page_num in range(len(reader2.pages)):
+ page = reader2.pages[page_num]
+ writer.add_page(page)
+ #writer.remove_links() # to remove comments.
+ buffer = BytesIO()
+ writer.write(buffer)
+ client = boto3.client('s3',config=Config(signature_version='s3v4'),
+ endpoint_url='https://{0}/'.format(dedupe_s3_host),
+ aws_access_key_id= s3_access_key_id,
+ aws_secret_access_key= s3_secret_access_key,
+ region_name= dedupe_s3_region
+ )
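+ # keep an "ORIGINAL"-suffixed copy of the source PDF in S3 before overwriting it with the metadata-stripped version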
+ copyresponse = client.copy_object(
+ CopySource="/" + "/".join(filepath.split("/")[3:]), # /Bucket-name/path/filename
+ Bucket=filepath.split("/")[3], # Destination bucket
+ Key= "/".join(filepath.split("/")[4:])[:-4] + 'ORIGINAL' + '.pdf' # Destination path/filename
+ )
+ uploadresponse = requests.put(
+ filepath,
+ data=buffer.getvalue(),
+ auth=auth
+ )
+ uploadresponse.raise_for_status()
+
elif extension.lower() in file_conversion_types:
# "Extension different {0}, so need to download pdf here for pagecount!!".format(extension))
pdfresponseofconverted = requests.get(
diff --git a/computingservices/DedupeServices/utils/foidedupeconfig.py b/computingservices/DedupeServices/utils/foidedupeconfig.py
index 6e648af28..a51710bc2 100644
--- a/computingservices/DedupeServices/utils/foidedupeconfig.py
+++ b/computingservices/DedupeServices/utils/foidedupeconfig.py
@@ -18,7 +18,6 @@
dedupe_db_user = os.getenv("DEDUPE_DB_USER")
dedupe_db_password = os.getenv("DEDUPE_DB_PASSWORD")
-dedupe_s3_host = os.getenv("DEDUPE_S3_HOST")
dedupe_s3_host = os.getenv("DEDUPE_S3_HOST")
dedupe_s3_region = os.getenv("DEDUPE_S3_REGION")
dedupe_s3_service = os.getenv("DEDUPE_S3_SERVICE")
diff --git a/computingservices/DocumentServices/rstreamio/message/schemas/redactionsummary.py b/computingservices/DocumentServices/rstreamio/message/schemas/redactionsummary.py
index 979e4d18f..b3a6d67f5 100644
--- a/computingservices/DocumentServices/rstreamio/message/schemas/redactionsummary.py
+++ b/computingservices/DocumentServices/rstreamio/message/schemas/redactionsummary.py
@@ -30,7 +30,7 @@ def __init__(self, sorteddocuments, pkgdocuments) -> None:
class RedactionSummaryMessage(object):
def __init__(self, jobid, requestid, ministryrequestid, category, requestnumber,
- bcgovcode, createdby, filestozip, finaloutput, attributes, summarydocuments ,redactionlayerid) -> None:
+ bcgovcode, createdby, filestozip, finaloutput, attributes, summarydocuments ,redactionlayerid, requesttype, feeoverridereason) -> None:
self.jobid = jobid
self.requestid = requestid
self.ministryrequestid = ministryrequestid
@@ -43,6 +43,9 @@ def __init__(self, jobid, requestid, ministryrequestid, category, requestnumber,
self.attributes = attributes
self.summarydocuments = summarydocuments
self.redactionlayerid = redactionlayerid
+ self.feeoverridereason = feeoverridereason
+ self.requesttype = requesttype
def get_in_redactionsummary_msg(producer_json):
diff --git a/computingservices/DocumentServices/services/dts/redactionsummary.py b/computingservices/DocumentServices/services/dts/redactionsummary.py
index fcff74a52..bb08b2a84 100644
--- a/computingservices/DocumentServices/services/dts/redactionsummary.py
+++ b/computingservices/DocumentServices/services/dts/redactionsummary.py
@@ -2,15 +2,17 @@
from rstreamio.message.schemas.redactionsummary import get_in_summary_object,get_in_summarypackage_object
import json
from collections import defaultdict
+import traceback
class redactionsummary():
def prepareredactionsummary(self, message, documentids, pageflags, programareas):
- if message.bcgovcode == 'mcf':
+ _ismcfpersonalrequest = True if message.bcgovcode == 'mcf' and message.requesttype == 'personal' else False
+ if _ismcfpersonalrequest and message.category == "responsepackage":
redactionsummary = self.__packagesummaryforcfdrequests(message, documentids)
else:
redactionsummary = self.__packaggesummary(message, documentids, pageflags, programareas)
- if message.category == "responsepackage" and message.bcgovcode != 'mcf':
+ if message.category == "responsepackage" and _ismcfpersonalrequest == False:
consolidated_redactions = []
for entry in redactionsummary['data']:
consolidated_redactions += entry['sections']
@@ -32,7 +34,8 @@ def __packaggesummary(self, message, documentids, pageflags, programareas):
ordereddocids = summaryobject.sorteddocuments
stitchedpagedata = documentpageflag().getpagecount_by_documentid(message.ministryrequestid, ordereddocids)
totalpagecount = self.__calculate_totalpages(stitchedpagedata)
- print("\ntotalpagecount",totalpagecount)
+ print("\n __packaggesummary stitchedpagedata",stitchedpagedata)
+ print("\n __packaggesummary totalpagecount",totalpagecount)
if totalpagecount <=0:
return
@@ -40,33 +43,49 @@ def __packaggesummary(self, message, documentids, pageflags, programareas):
print("\n_pageflags",_pageflags)
summarydata = []
docpageflags = documentpageflag().get_documentpageflag(message.ministryrequestid, redactionlayerid, ordereddocids)
+ print("\n docpageflags",docpageflags)
deletedpages = self.__getdeletedpages(message.ministryrequestid, ordereddocids)
skippages= []
pagecount = 0
- for docid in ordereddocids:
- if docid in documentids:
- docdeletedpages = deletedpages[docid] if docid in deletedpages else []
- docpageflag = docpageflags[docid]
- for pageflag in _pageflags:
- filteredpages = self.__get_pages_by_flagid(docpageflag["pageflag"], docdeletedpages, pagecount, pageflag["pageflagid"], message.category)
- if len(filteredpages) > 0:
- originalpagenos = [pg['originalpageno'] for pg in filteredpages]
- docpagesections = documentpageflag().getsections_by_documentid_pageno(redactionlayerid, docid, originalpagenos)
- docpageconsults = self.__get_consults_by_pageno(programareas, docpageflag["pageflag"], filteredpages)
- pageflag['docpageflags'] = pageflag['docpageflags'] + self.__get_pagesection_mapping(filteredpages, docpagesections, docpageconsults)
- skippages = self.__get_skippagenos(docpageflag['pageflag'], message.category)
- pagecount = (pagecount+stitchedpagedata[docid]["pagecount"])-len(skippages)
- print("\n_pageflags1",_pageflags)
- for pageflag in _pageflags:
- _data = {}
- if len(pageflag['docpageflags']) > 0:
+ try:
+ for docid in ordereddocids:
+ if docid in documentids:
+ docdeletedpages = deletedpages[docid] if docid in deletedpages else []
+ if docpageflags is not None and docid in docpageflags.keys():
+ docpageflag = docpageflags[docid]
+ for pageflag in _pageflags:
+ filteredpages = self.__get_pages_by_flagid(docpageflag["pageflag"], docdeletedpages, pagecount, pageflag["pageflagid"], message.category)
+ if len(filteredpages) > 0:
+ originalpagenos = [pg['originalpageno'] for pg in filteredpages]
+ docpagesections = documentpageflag().getsections_by_documentid_pageno(redactionlayerid, docid, originalpagenos)
+ docpageconsults = self.__get_consults_by_pageno(programareas, docpageflag["pageflag"], filteredpages)
+ pageflag['docpageflags'] = pageflag['docpageflags'] + self.__get_pagesection_mapping(filteredpages, docpagesections, docpageconsults)
+ skippages = self.__get_skippagenos(docpageflag['pageflag'], message.category)
+ if stitchedpagedata is not None:
+ pagecount = (pagecount+stitchedpagedata[docid]["pagecount"])-len(skippages)
+ print("\n_pageflags1",_pageflags)
+ for pageflag in _pageflags:
_data = {}
- _data["flagname"] = pageflag["header"].upper()
- _data["pagecount"] = len(pageflag['docpageflags'])
- _data["sections"] = self.__format_redaction_summary(pageflag["description"], pageflag['docpageflags'], message.category)
- summarydata.append(_data)
+ if len(pageflag['docpageflags']) > 0:
+ _data = {}
+ _data["flagname"] = pageflag["header"].upper()
+ _data["pagecount"] = len(pageflag['docpageflags'])
+ _data["sections"] = self.__format_redaction_summary(pageflag["description"], pageflag['docpageflags'], message.category)
+ summarydata.append(_data)
+ # remove Duplicate and Not Responsive entries from the summary for the OIPC review redline
+ def removeduplicateandnr(pageflag):
+ if pageflag['flagname'].lower() != 'duplicate' and pageflag['flagname'].lower() != 'not responsive':
+ return True
+ return False
+ if message.category == "oipcreviewredline":
+ print("\n removing duplicate and not responsive pages from summary")
+ summarydata = list(filter(removeduplicateandnr, summarydata))
+ except (Exception) as err:
+ traceback.print_exc()
+ print('error occurred in __packaggesummary redaction dts service: ', err)
return {"requestnumber": message.requestnumber, "data": summarydata}
except (Exception) as error:
+ traceback.print_exc()
print('error occured in redaction dts service: ', error)
@@ -89,34 +108,35 @@ def __packagesummaryforcfdrequests(self, message, documentids):
docpageflags = documentpageflag().get_documentpageflag(message.ministryrequestid, redactionlayerid, ordereddocids)
sorted_docpageflags = {k: docpageflags[k] for k in ordereddocids}
- print("============>sorted_docpageflags:", sorted_docpageflags)
+ # print("============>sorted_docpageflags:", sorted_docpageflags)
deletedpages = self.__getdeletedpages(message.ministryrequestid, ordereddocids)
#print("============>deletedpages:", deletedpages)
mapped_flags = self.process_page_flags(sorted_docpageflags,deletedpages)
#print("###mapped_flags1:",mapped_flags)
pagecounts= self.count_pages_per_doc(mapped_flags)
- print("pagecounts:",pagecounts)
+ # print("pagecounts:",pagecounts)
#document_pages = self.__get_document_pages(docpageflags)
#original_pages = self.__adjust_original_pages(document_pages)
end_page = 0
for record in records:
- print("-----------------------Record : ---------------------------", record["documentids"])
- record_range, totalpagecount1,end_page = self.__createrecordpagerange(record, pagecounts,end_page )
- print(f"Range for each record- record_range:{record_range} &&& totalpagecount1:{totalpagecount1} \
- &&& end_page-{end_page}")
- self.assignfullpagesections(redactionlayerid, mapped_flags)
- print("\nMapped_flags::",mapped_flags)
- range_result = self.__calculate_range(mapped_flags, record["documentids"])
- print("range_result:",range_result)
- recordwise_pagecount = next((record["pagecount"] for record in record_range if record["recordname"] == record['recordname'].upper()), 0)
- print(f"{record['recordname']} :{recordwise_pagecount}")
- summarydata.append(self.__create_summary_data(record, range_result, mapped_flags, recordwise_pagecount))
-
- print("\n summarydata:",summarydata)
+ if record["documentids"][0] in pagecounts:
+ # print("-----------------------Record : ---------------------------", record["documentids"])
+ record_range, totalpagecount1,end_page = self.__createrecordpagerange(record, pagecounts,end_page )
+ # print(f"Range for each record- record_range:{record_range} &&& totalpagecount1:{totalpagecount1} \
+ # &&& end_page-{end_page}")
+ self.assignfullpagesections(redactionlayerid, mapped_flags)
+ # print("\nMapped_flags::",mapped_flags)
+ range_result = self.__calculate_range(mapped_flags, record["documentids"])
+ # print("range_result:",range_result)
+ recordwise_pagecount = next((record["pagecount"] for record in record_range if record["recordname"] == record['recordname'].upper()), 0)
+ # print(f"{record['recordname']} :{recordwise_pagecount}")
+ summarydata.append(self.__create_summary_data(record, range_result, mapped_flags, recordwise_pagecount))
+
+ # print("\n summarydata:",summarydata)
return {"requestnumber": message.requestnumber, "data": summarydata}
except Exception as error:
- print('Error occurred in redaction dts service: ', error)
+ print('CFD Error occurred in redaction dts service: ', error)
def __calculate_range(self, mapped_flags, docids):
@@ -132,17 +152,17 @@ def __calculate_range(self, mapped_flags, docids):
grouped_flags= self.__groupbysections(filtered_mapper)
ranges = self.__create_ranges(grouped_flags)
- print("\n ranges:",ranges)
- return {"range": f"{min_stitched_page}-{max_stitched_page}" if min_stitched_page != max_stitched_page else f"{min_stitched_page}", "flagged_range":ranges}
+ # print("\n ranges:",ranges)
+ return {"range": f"{min_stitched_page} - {max_stitched_page}" if min_stitched_page != max_stitched_page else f"{min_stitched_page}", "flagged_range":ranges}
def assignfullpagesections(self, redactionlayerid, mapped_flags):
document_pages= self.get_sorted_original_pages_by_docid(mapped_flags)
- print("document_pages:",document_pages)
+ # print("document_pages:",document_pages)
for item in document_pages:
for doc_id, pages in item.items():
docpagesections = documentpageflag().getsections_by_documentid_pageno(redactionlayerid, doc_id, pages)
- print(f"\n doc_id-{doc_id}, docpagesections-{docpagesections}")
+ # print(f"\n doc_id-{doc_id}, docpagesections-{docpagesections}")
for flag in mapped_flags:
if flag['docid'] == doc_id and flag['flagid'] == 3:
flag['sections']= self.__get_sections_mcf1(docpagesections, flag['dbpageno'])
@@ -151,7 +171,7 @@ def assignfullpagesections(self, redactionlayerid, mapped_flags):
def __get_sections_mcf1(self, docpagesections, pageno):
sections = []
filtered = [x for x in docpagesections if x['pageno'] == pageno]
- print(f"\n pageno-{pageno}, filtered-{filtered}")
+ # print(f"\n pageno-{pageno}, filtered-{filtered}")
if filtered:
for dta in filtered:
sections += [x.strip() for x in dta['section'].split(",")]
@@ -185,12 +205,12 @@ def __createrecordpagerange(self, record, pagecounts, previous_end_page=0):
totalpagecount1 += pagecounts[doc_id]
if totalpagecount1 == 0:
- return [], previous_end_page
+ return [], totalpagecount1, previous_end_page
start_page = previous_end_page + 1
end_page = previous_end_page + totalpagecount1
- range_string = f"{start_page}-{end_page}" if totalpagecount1 > 1 else f"{start_page}"
+ range_string = f"{start_page} - {end_page}" if totalpagecount1 > 1 else f"{start_page}"
result = {
"recordname": record['recordname'].upper(),
"range": range_string,
@@ -258,7 +278,7 @@ def process_page_flags(self,docpageflags, deletedpages):
def __groupbysections(self, filtered_mapper):
- print("\n __groupbysections: ", filtered_mapper)
+ # print("\n __groupbysections: ", filtered_mapper)
# Group by sections
grouped_flags = defaultdict(list)
for flag in filtered_mapper:
@@ -266,7 +286,7 @@ def __groupbysections(self, filtered_mapper):
sections_key = tuple(flag['sections']) if 'sections' in flag and flag['sections'] else ('No Section',)
grouped_flags[sections_key].append(flag)
grouped_flags = dict(grouped_flags)
- print("\n grouped_flags:", grouped_flags)
+ # print("\n grouped_flags:", grouped_flags)
return grouped_flags
@@ -286,14 +306,14 @@ def __create_ranges(self, grouped_flags):
if start == prev:
range_list.append(f"{start}")
else:
- range_list.append(f"{start}-{prev}")
+ range_list.append(f"{start} - {prev}")
start = page
prev = page
# Add the last range
if start == prev:
range_list.append(f"{start}")
else:
- range_list.append(f"{start}-{prev}")
+ range_list.append(f"{start} - {prev}")
# Save the range list for the current sections_key
ranges[sections_key] = range_list
return ranges
@@ -323,7 +343,7 @@ def generate_text(self, range_result):
# Format the section information
formatted_sections = f"{pageflag} under {sections_str}" if sections_str else ""
# Append the formatted text to the section list
- section_list.append({"formatted" :f"{range_item} were {formatted_sections}" if formatted_sections else range_item})
+ section_list.append({"formatted" :f"pg(s). {range_item} {formatted_sections}" if formatted_sections else range_item})
return section_list
@@ -419,10 +439,10 @@ def __get_pagesection_mapping(self, docpages, docpagesections, docpageconsults):
def __get_sections(self, docpagesections, pageno):
- print(f"\n pageno-{pageno}, docpagesections-{docpagesections}")
+ # print(f"\n pageno-{pageno}, docpagesections-{docpagesections}")
sections = []
filtered = [x for x in docpagesections if x['pageno'] == pageno]
- print("\n filtered:",filtered)
+ # print("\n filtered:",filtered)
for dta in filtered:
sections += [x.strip() for x in dta['section'].split(",")]
return list(filter(None, sections))
@@ -439,7 +459,7 @@ def __get_pages_by_flagid(self, _docpageflags, deletedpages, totalpages, flagid,
def __get_skippagenos(self, _docpageflags, category):
skippages = []
- if category in ['responsepackage', 'CFD_responsepackage']:
+ if category in ['responsepackage', 'CFD_responsepackage', 'oipcreviewredline']:
for x in _docpageflags:
if x['flagid'] in (5,6) and x['page'] not in skippages:
skippages.append(x['page'])
@@ -447,7 +467,7 @@ def __get_skippagenos(self, _docpageflags, category):
def __calcstitchedpageno(self, pageno, totalpages, category, skippages, deletedpages):
skipcount = 0
- if category in ["responsepackage", 'CFD_responsepackage']:
+ if category in ["responsepackage", 'CFD_responsepackage', 'oipcreviewredline']:
skipcount = self.__calculateskipcount(pageno, skippages)
skipcount = self.__calculateskipcount(pageno, deletedpages, skipcount)
return (pageno+totalpages)-skipcount
diff --git a/computingservices/DocumentServices/services/redactionsummaryservice.py b/computingservices/DocumentServices/services/redactionsummaryservice.py
index 2984c52b1..adf5ba6d7 100644
--- a/computingservices/DocumentServices/services/redactionsummaryservice.py
+++ b/computingservices/DocumentServices/services/redactionsummaryservice.py
@@ -13,32 +13,37 @@ class redactionsummaryservice():
def processmessage(self,incomingmessage):
summaryfilestozip = []
message = get_in_redactionsummary_msg(incomingmessage)
+ print('\n 1. get_in_redactionsummary_msg is : {0}'.format(message))
try:
+ category = message.category
+ # Condition to handle consult packages (no summary files need to be created)
+ if category == "consultpackage":
+ return summaryfilestozip
pdfstitchjobactivity().recordjobstatus(message,3,"redactionsummarystarted")
summarymsg = message.summarydocuments
#Condition for handling oipcredline category
bcgovcode= message.bcgovcode
- category = message.category
- if bcgovcode == 'mcf':
+ requesttype = message.requesttype
+ if bcgovcode == 'mcf' and requesttype == 'personal' and category == 'responsepackage':
documenttypename= 'CFD_responsepackage_redaction_summary'
else:
documenttypename= category+"_redaction_summary" if category == 'responsepackage' else "redline_redaction_summary"
- #print('documenttypename', documenttypename)
+ print('\n 2. documenttypename', documenttypename)
upload_responses=[]
pageflags = self.__get_pageflags(category)
programareas = documentpageflag().get_all_programareas()
messageattributes= json.loads(message.attributes)
- #print("\nmessageattributes:",messageattributes)
+ print("\n 3. messageattributes:",messageattributes)
divisiondocuments = get_in_summary_object(summarymsg).pkgdocuments
- #print("\n divisiondocuments:",divisiondocuments)
+ print("\n 4. divisiondocuments:",divisiondocuments)
for entry in divisiondocuments:
#print("\n entry:",entry)
if 'documentids' in entry and len(entry['documentids']) > 0 :
- # print("\n entry['divisionid']:",entry['divisionid'])
+ print("\n 5. entry['divisionid']:",entry['divisionid'])
divisionid = entry['divisionid']
documentids = entry['documentids']
formattedsummary = redactionsummary().prepareredactionsummary(message, documentids, pageflags, programareas)
- #print("formattedsummary", formattedsummary)
+ print("\n 6. formattedsummary", formattedsummary)
template_path='templates/'+documenttypename+'.docx'
redaction_summary= documentgenerationservice().generate_pdf(formattedsummary, documenttypename,template_path)
divisioname = None
@@ -58,7 +63,7 @@ def processmessage(self,incomingmessage):
s3uricategoryfolder = category
s3uri = stitcheddocs3uri.split(s3uricategoryfolder+"/")[0] + s3uricategoryfolder+"/"
filename =self.__get_summaryfilename(message.requestnumber, category, divisioname, stitcheddocfilename)
- print("\n filename:",filename)
+ print("\n redaction_summary.content length: {0}".format(len(redaction_summary.content)))
uploadobj= uploadbytes(filename,redaction_summary.content,s3uri)
upload_responses.append(uploadobj)
if uploadobj["uploadresponse"].status_code == 200:
@@ -68,10 +73,11 @@ def processmessage(self,incomingmessage):
summaryuploaderror= True
summaryuploaderrormsg = uploadobj.uploadresponse.text
pdfstitchjobactivity().recordjobstatus(message,4,"redactionsummaryuploaded",summaryuploaderror,summaryuploaderrormsg)
- print("\ns3uripath:",uploadobj["documentpath"])
+ # print("\ns3uripath:",uploadobj["documentpath"])
summaryfilestozip.append({"filename": uploadobj["filename"], "s3uripath":uploadobj["documentpath"]})
return summaryfilestozip
except (Exception) as error:
+ traceback.print_exc()
print('error occured in redaction summary service: ', error)
pdfstitchjobactivity().recordjobstatus(message,4,"redactionsummaryfailed",str(error),"summary generation failed")
return summaryfilestozip
@@ -86,7 +92,7 @@ def __get_summaryfilename(self, requestnumber, category, divisionname, stitchedd
_filename = requestnumber+" - "+category
if divisionname not in (None, ''):
_filename = _filename+" - "+divisionname
- print("---->",stitchedfilepath+_filename+" - summary.pdf")
+ # print("---->",stitchedfilepath+_filename+" - summary.pdf")
return stitchedfilepath+_filename+" - summary.pdf"
def __get_pageflags(self, category):
diff --git a/computingservices/DocumentServices/services/zippingservice.py b/computingservices/DocumentServices/services/zippingservice.py
index c997af363..06644da8c 100644
--- a/computingservices/DocumentServices/services/zippingservice.py
+++ b/computingservices/DocumentServices/services/zippingservice.py
@@ -11,14 +11,16 @@ def sendtozipper(self, summaryfiles, message):
def preparemessageforzipperservice(self,summaryfiles, message):
try:
msgjson= json.loads(message)
+ msgjson.pop('requesttype', None)
if summaryfiles and len(summaryfiles) > 0:
filestozip_list = json.loads(msgjson['filestozip'])+summaryfiles
else:
- filestozip_list = msgjson['filestozip']
+ filestozip_list = json.loads(msgjson['filestozip'])
print('filestozip_list: ', filestozip_list)
msgjson['filestozip'] = self.to_json(filestozip_list)
msgjson['attributes'] = self.to_json(msgjson['attributes'])
- msgjson['summarydocuments'] = self.to_json(msgjson['summarydocuments'])
+ msgjson['summarydocuments'] = self.to_json(msgjson['summarydocuments'])
+
return msgjson
except (Exception) as error:
print('error occured in zipping service: ', error)
diff --git a/computingservices/DocumentServices/templates/CFD_responsepackage_redaction_summary.docx b/computingservices/DocumentServices/templates/CFD_responsepackage_redaction_summary.docx
index feac3cba3..fa9a6034a 100644
Binary files a/computingservices/DocumentServices/templates/CFD_responsepackage_redaction_summary.docx and b/computingservices/DocumentServices/templates/CFD_responsepackage_redaction_summary.docx differ
diff --git a/computingservices/DocumentServices/templates/redline_redaction_summary.docx b/computingservices/DocumentServices/templates/redline_redaction_summary.docx
index e08da9f00..35b681462 100644
Binary files a/computingservices/DocumentServices/templates/redline_redaction_summary.docx and b/computingservices/DocumentServices/templates/redline_redaction_summary.docx differ
diff --git a/computingservices/DocumentServices/templates/responsepackage_redaction_summary.docx b/computingservices/DocumentServices/templates/responsepackage_redaction_summary.docx
index 86a3730ab..e81ba1b3f 100644
Binary files a/computingservices/DocumentServices/templates/responsepackage_redaction_summary.docx and b/computingservices/DocumentServices/templates/responsepackage_redaction_summary.docx differ
diff --git a/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py b/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py
index a358236b4..8d415ce29 100644
--- a/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py
+++ b/computingservices/PageCountCalculator/services/dal/pagecount/ministryservice.py
@@ -11,7 +11,7 @@ def getlatestrecordspagecount(cls, ministryrequestid):
cursor = conn.cursor()
query = '''
SELECT recordspagecount
- FROM public."FOIMinistryRequests"
+ FROM "FOIMinistryRequests"
WHERE foiministryrequestid = %s::integer AND isactive = true
ORDER BY version DESC LIMIT 1;
'''
@@ -33,7 +33,7 @@ def updaterecordspagecount(cls, ministryrequestid, pagecount, userid):
try:
cursor = conn.cursor()
query = '''
- UPDATE public."FOIMinistryRequests" SET recordspagecount = %s::integer, updated_at = %s, updatedby = %s
+ UPDATE "FOIMinistryRequests" SET recordspagecount = %s::integer, updated_at = %s, updatedby = %s
WHERE foiministryrequestid = %s::integer AND isactive = true;
'''
parameters = (pagecount, datetime.now().isoformat(), userid, ministryrequestid,)
diff --git a/computingservices/ZippingServices/models/redlineresponsenotificationmessage.py b/computingservices/ZippingServices/models/redlineresponsenotificationmessage.py
index 5028e8783..cc562164f 100644
--- a/computingservices/ZippingServices/models/redlineresponsenotificationmessage.py
+++ b/computingservices/ZippingServices/models/redlineresponsenotificationmessage.py
@@ -1,6 +1,7 @@
class redlineresponsenotificationmessage(object):
- def __init__(self, ministryrequestid, serviceid, errorflag, createdby) -> None:
+ def __init__(self, ministryrequestid, serviceid, errorflag, createdby,feeoverridereason="") -> None:
self.ministryrequestid = ministryrequestid
self.serviceid = serviceid
self.errorflag = errorflag
self.createdby = createdby
+ self.feeoverridereason=feeoverridereason
diff --git a/computingservices/ZippingServices/models/zipperproducermessage.py b/computingservices/ZippingServices/models/zipperproducermessage.py
index deee421e1..41b175753 100644
--- a/computingservices/ZippingServices/models/zipperproducermessage.py
+++ b/computingservices/ZippingServices/models/zipperproducermessage.py
@@ -1,5 +1,5 @@
class zipperproducermessage(object):
- def __init__(self,jobid,requestid,category,requestnumber,bcgovcode,createdby,ministryrequestid,filestozip,finaloutput,attributes,summarydocuments=None,redactionlayerid=None,foldername=None) -> None:
+ def __init__(self,jobid,requestid,category,requestnumber,bcgovcode,createdby,ministryrequestid,filestozip,finaloutput,attributes,feeoverridereason=None,summarydocuments=None,redactionlayerid=None,foldername=None) -> None:
self.jobid = jobid
self.requestid = requestid
self.category=category
@@ -13,3 +13,4 @@ def __init__(self,jobid,requestid,category,requestnumber,bcgovcode,createdby,min
self.foldername = foldername
self.summarydocuments = summarydocuments
self.redactionlayerid = redactionlayerid
+ self.feeoverridereason= feeoverridereason
\ No newline at end of file
diff --git a/computingservices/ZippingServices/requirements.txt b/computingservices/ZippingServices/requirements.txt
index 6e9366875..a08f010df 100644
Binary files a/computingservices/ZippingServices/requirements.txt and b/computingservices/ZippingServices/requirements.txt differ
diff --git a/computingservices/ZippingServices/services/notificationservice.py b/computingservices/ZippingServices/services/notificationservice.py
index f237393f9..5fc5f1c5c 100644
--- a/computingservices/ZippingServices/services/notificationservice.py
+++ b/computingservices/ZippingServices/services/notificationservice.py
@@ -44,6 +44,7 @@ def __responsepackagepublishtostream(self, message, error=False):
serviceid="pdfstitchforresponsepackage",
createdby=message.createdby,
errorflag=self.__booltostr(error),
+ feeoverridereason= message.feeoverridereason
)
logging.info(
diff --git a/computingservices/ZippingServices/services/zipperservice.py b/computingservices/ZippingServices/services/zipperservice.py
index dddcc827d..312ad7562 100644
--- a/computingservices/ZippingServices/services/zipperservice.py
+++ b/computingservices/ZippingServices/services/zipperservice.py
@@ -14,7 +14,7 @@
from .notificationservice import notificationservice
import json
import traceback
-
+import PyPDF2
def processmessage(message):
try:
@@ -110,8 +110,22 @@ def __zipfilesandupload(_message, s3credentials):
for fileobj in _jsonfiles:
filename = fileobj["filename"]
print("\nfilename:",filename)
+
+ _docbytes = __getdocumentbytearray(fileobj, s3credentials)
+ _formattedbytes = None
+
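+ # strip PDF metadata before zipping; if cleaning fails, fall back to the original document bytes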
+ try:
+ _formattedbytes = __removesensitivecontent(_docbytes)
+ if _formattedbytes is not None:
+ print("_formattedbytes length is {0}".format(len(_formattedbytes)))
+ else:
+ print("_formattedbytes is none")
+ except Exception:
+ print("error happened while removing sensitive content of {0} ".format(filename))
+ print(traceback.format_exc())
zip.writestr(
- filename, __getdocumentbytearray(fileobj, s3credentials)
+ filename, _docbytes if _formattedbytes is None else _formattedbytes
)
tp.seek(0)
@@ -136,6 +150,22 @@ def __zipfilesandupload(_message, s3credentials):
finally:
zipped_bytes = None
+def __removesensitivecontent(documentbytes):
+ # clear metadata
+ reader2 = PyPDF2.PdfReader(BytesIO(documentbytes))
+ # Rebuild the PDF through a fresh writer so document-level metadata is dropped.
+ writer = PyPDF2.PdfWriter()
+ # Copy pages from the original PDF to the new PDF.
+ for page_num in range(len(reader2.pages)):
+ page = reader2.pages[page_num]
+ writer.add_page(page)
+ #writer.remove_links() # to remove comments.
+ buffer = BytesIO()
+ writer.write(buffer)
+ return buffer.getvalue()
+
def __getzipfilepath(foldername, filename):
return (
diff --git a/web/public/stylesheets/webviewer.css b/web/public/stylesheets/webviewer.css
index 9084bb2bc..bbf51b3c6 100644
--- a/web/public/stylesheets/webviewer.css
+++ b/web/public/stylesheets/webviewer.css
@@ -19,6 +19,11 @@
cursor: not-allowed !important;
}
+.consult_package:disabled {
+ color: #999999 !important;
+ cursor: not-allowed !important;
+}
+
.file-upload-toast {
.Toastify__toast-body {
> div:last-child {
diff --git a/web/src/actions/actionConstants.ts b/web/src/actions/actionConstants.ts
index cbae36c3d..a93f87dce 100644
--- a/web/src/actions/actionConstants.ts
+++ b/web/src/actions/actionConstants.ts
@@ -20,6 +20,7 @@ const ACTION_CONSTANTS = {
INC_REDACTION_LAYER: "INC_REDACTION_LAYER",
SET_REQUEST_NUMBER:"SET_REQUEST_NUMBER",
SET_DELETED_PAGES: "SET_DELETED_PAGES",
+ SET_PUBLIC_BODIES: "SET_PUBLIC_BODIES",
FOI_PERSONAL_SECTIONS: "FOI_PERSONAL_SECTIONS",
FOI_PERSONAL_PEOPLE: "FOI_PERSONAL_PEOPLE",
FOI_PERSONAL_FILETYPES: "FOI_PERSONAL_FILETYPES",
diff --git a/web/src/actions/documentActions.ts b/web/src/actions/documentActions.ts
index da519f073..22b5ff4b4 100644
--- a/web/src/actions/documentActions.ts
+++ b/web/src/actions/documentActions.ts
@@ -1,5 +1,13 @@
import ACTION_CONSTANTS from "./actionConstants";
+type PublicBody = {
+ bcgovcode: string,
+ iaocode: string,
+ name: string,
+ isactive: boolean,
+ type: string,
+ programareaid: number
+}
export const setRedactionInfo = (data: any) => (dispatch:any) =>{
dispatch({
@@ -8,7 +16,6 @@ export const setRedactionInfo = (data: any) => (dispatch:any) =>{
})
}
-
export const setIsPageLeftOff = (data: any) => (dispatch:any) =>{
dispatch({
type:ACTION_CONSTANTS.SET_IS_PAGE_LEFT_OFF,
@@ -93,6 +100,13 @@ export const setDeletedPages = (data: any) => (dispatch:any) =>{
})
}
+export const setPublicBodies = (data: PublicBody[]) => (dispatch:any) =>{
+ dispatch({
+ type:ACTION_CONSTANTS.SET_PUBLIC_BODIES,
+ payload:data
+ })
+}
+
export const setFOIPersonalSections = (data: any) => (dispatch:any) =>{
dispatch({
type:ACTION_CONSTANTS.FOI_PERSONAL_SECTIONS,
@@ -116,4 +130,4 @@ export const setFOIPersonalSections = (data: any) => (dispatch:any) =>{
type:ACTION_CONSTANTS.FOI_PERSONAL_VOLUMES,
payload:data
})
- }
\ No newline at end of file
+ }
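The new setPublicBodies thunk only dispatches; the reducer that stores the payload is outside this diff. A minimal sketch of how the documents slice could consume it, assuming the list is kept under allPublicBodies (the key later read via state.documents?.allPublicBodies in useSaveRedlineForSignOff):

    // Sketch of a reducer case for the new action; the real documents reducer
    // is not shown in this diff and the "allPublicBodies" key is inferred from
    // the selector state.documents?.allPublicBodies used in the hooks below.
    type PublicBody = {
      bcgovcode: string;
      iaocode: string;
      name: string;
      isactive: boolean;
      type: string;
      programareaid: number;
    };

    type DocumentsState = { allPublicBodies: PublicBody[] };

    const documentsReducer = (
      state: DocumentsState = { allPublicBodies: [] },
      action: { type: string; payload?: any }
    ): DocumentsState => {
      switch (action.type) {
        case "SET_PUBLIC_BODIES": // ACTION_CONSTANTS.SET_PUBLIC_BODIES
          return { ...state, allPublicBodies: action.payload as PublicBody[] };
        default:
          return state;
      }
    };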
diff --git a/web/src/apiManager/services/docReviewerService.tsx b/web/src/apiManager/services/docReviewerService.tsx
index 03ed64e41..b840059e3 100644
--- a/web/src/apiManager/services/docReviewerService.tsx
+++ b/web/src/apiManager/services/docReviewerService.tsx
@@ -4,7 +4,7 @@ import API from "../endpoints";
import UserService from "../../services/UserService";
import { setRedactionInfo, setIsPageLeftOff, setSections,
setDocumentList, setRequestStatus, setRedactionLayers, incrementLayerCount, setRequestNumber, setRequestInfo, setDeletedPages,
- setFOIPersonalSections, setFOIPersonalPeople, setFOIPersonalFiletypes, setFOIPersonalVolumes
+ setFOIPersonalSections, setFOIPersonalPeople, setFOIPersonalFiletypes, setFOIPersonalVolumes, setPublicBodies
} from "../../actions/documentActions";
import { store } from "../../services/StoreService";
import { number } from "yargs";
@@ -36,7 +36,6 @@ export const fetchDocuments = (
store.dispatch(setRequestNumber(res.data.requestnumber) as any);
store.dispatch(setRequestStatus(res.data.requeststatuslabel) as any);
store.dispatch(setRequestInfo(res.data.requestinfo) as any);
- // callback(__files, res.data.documentdivisions, res.data.requestinfo);
callback(res.data.documents, res.data.documentdivisions, res.data.requestinfo);
} else {
throw new Error();
@@ -305,6 +304,7 @@ export const fetchPageFlagsMasterData = (
.then((res:any) => {
if (res.data || res.data === "") {
callback(res.data);
+ store.dispatch(setPublicBodies(res.data.find((flag: any) => flag.name === 'Consult').programareas));
} else {
throw new Error();
}
@@ -544,7 +544,6 @@ export const fetchPersonalAttributes = (
httpGETRequest(apiUrlGet, {}, UserService.getToken())
.then((res:any) => {
if (res.data) {
- console.log("fetchPersonalAttributes: ", res.data);
store.dispatch(setFOIPersonalPeople(res.data) as any);
store.dispatch(setFOIPersonalFiletypes(res.data) as any);
store.dispatch(setFOIPersonalVolumes(res.data) as any);
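The added dispatch in fetchPageFlagsMasterData assumes the master data always contains a flag named 'Consult'; if it is missing, the lookup would throw. A defensive variant, as a sketch only (not part of the patch), reusing the imports already present in this file:

    // Sketch only: dispatch the Consult flag's program areas defensively, so a
    // payload without a "Consult" flag yields an empty list instead of a throw.
    import { store } from "../../services/StoreService";
    import { setPublicBodies } from "../../actions/documentActions";

    const dispatchPublicBodies = (pageFlags: any[]) => {
      const consultFlag = pageFlags?.find((flag: any) => flag.name === "Consult");
      store.dispatch(setPublicBodies(consultFlag?.programareas ?? []) as any);
    };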
diff --git a/web/src/apiManager/services/foiOSSService.tsx b/web/src/apiManager/services/foiOSSService.tsx
index bb170b92a..82a79a962 100644
--- a/web/src/apiManager/services/foiOSSService.tsx
+++ b/web/src/apiManager/services/foiOSSService.tsx
@@ -59,6 +59,8 @@ export const getFOIS3DocumentRedlinePreSignedUrl = (
if (layertype === "oipcreview") {
apiurl = apiurl + "/oipcreview"
+ } else if (layertype === "consult") {
+ apiurl = apiurl + "/consult"
} else {
apiurl = apiurl + "/" + layer
}
diff --git a/web/src/components/FOI/App.scss b/web/src/components/FOI/App.scss
index f518f21da..4c00c7c64 100644
--- a/web/src/components/FOI/App.scss
+++ b/web/src/components/FOI/App.scss
@@ -82,6 +82,7 @@
li.modal-message-list-item {
margin: 6px 0;
+ font-size: 15px;
}
#state-change-dialog-title .MuiIconButton-root {
@@ -144,6 +145,12 @@ li.modal-message-list-item {
height: calc(100% - 198px);
}
+.modal-content{
+ padding: 20px 30px !important;
+ overflow-y: visible !important;
+ height: calc(100% - 198px);
+}
+
.section-list{
>li:nth-child(odd) {
background-color: #E5EAEF;
@@ -221,6 +228,10 @@ li.modal-message-list-item {
min-height: 350px !important;
}
+.consult-modal {
+ min-height: 600px !important;
+}
+
.redline-checkmark {
height:14px;
width:14px;
diff --git a/web/src/components/FOI/Home/ConfirmationModal.js b/web/src/components/FOI/Home/ConfirmationModal.js
index 651cf6e8c..b4f1dc370 100644
--- a/web/src/components/FOI/Home/ConfirmationModal.js
+++ b/web/src/components/FOI/Home/ConfirmationModal.js
@@ -9,9 +9,10 @@ import DialogContentText from "@mui/material/DialogContentText";
import DialogTitle from "@mui/material/DialogTitle";
import CloseIcon from "@mui/icons-material/Close";
import IconButton from "@mui/material/IconButton";
+import Grid from '@mui/material/Grid';
+import { Tooltip } from '@mui/material';
//import type { ReactModalProps } from './types';
-
export const ConfirmationModal= ({
cancelRedaction,
redlineModalOpen,
@@ -22,8 +23,16 @@ export const ConfirmationModal= ({
handleIncludeDuplicantePages,
isDisableNRDuplicate,
saveDoc,
- modalData
+ modalData,
+ documentPublicBodies,
+ handleSelectedPublicBodies,
+ selectedPublicBodyIDs,
+ consultApplyRedactions,
+ handleApplyRedactions,
+ consultApplyRedlines,
+ handleApplyRedlines
}) => {
+ let disableConsultSaveButton = modalData?.modalFor === "consult" && selectedPublicBodyIDs.length < 1;
return (
@@ -72,11 +81,79 @@ export const ConfirmationModal= ({
/>
Include Duplicate pages
>}
+ {modalData?.modalFor === "consult" &&
+ <>
+
+ {documentPublicBodies?.map((publicBody) => {
+ return (<>
+
+
+
+ {publicBody.iaocode}
+
+
+ >)
+ })}
+
+
+ More Options:
+
+ Include NR pages
+
+
+ Include Duplicate pages
+
+
+ Include Transparent Redactions (Redlines)
+
+
+ Apply Redactions (NR code only)
+ >}
-
+
{modalData?.modalButtonLabel}
- Select one or more Ministry you with the send the selected page(s) to for consult.
+ Select one or more public bodies you wish to consult with on the selected page(s).
{programAreaList.programareas?.map((programArea: any, index: number) => (
@@ -140,7 +140,7 @@ const ConsultModal = ({
))}
- If you do not see the name of the Ministry you would like to send for consult above please type it below.
+ If you do not see the name of the public body you wish to consult with, please type it below.
{
- if(data.status == true){
- console.log("Personal attributes updated")
- }
- },
- (error: any) => console.log(error),
- {
- documentmasterids: documentMasterIDs,
- personalattributes: newPersonalAttributes,
- ministryrequestid: requestId
- },
- );
-
- setCurrentEditRecord();
- setCurPersonalAttributes({
- person: "",
- filetype: "",
- volume: "",
- trackingid: "",
- personaltag: "TBD"
- });
- setNewPersonalAttributes({});
+
+ if(currentEditRecord && !comparePersonalAttributes(newPersonalAttributes, curPersonalAttributes)) {
+ editPersonalAttributes(
+ requestId,
+ (data: any) => {
+ if(data.status == true){
+ console.log("Personal attributes updated")
+ }
+ },
+ (error: any) => console.log(error),
+ {
+ documentmasterids: documentMasterIDs,
+ personalattributes: newPersonalAttributes,
+ ministryrequestid: requestId
+ },
+ );
+
+ setCurrentEditRecord();
+ setCurPersonalAttributes({
+ person: "",
+ filetype: "",
+ volume: "",
+ trackingid: "",
+ personaltag: "TBD"
+ });
+ setNewPersonalAttributes({});
+ }
}
};
@@ -266,12 +268,12 @@ const ContextMenu = ({
Export
+ {requestInfo?.bcgovcode === "MCF" && requestInfo?.requesttype === "personal" && (<>
1
- ? { cursor: "not-allowed", color: "#cfcfcf" }
- : {}
+ ? "editPersonalTagsDisabled"
+ : "editPersonalTags"
}
onClick={() => {
if(selectedPages.length <= 1) {
@@ -282,6 +284,7 @@ const ContextMenu = ({
Edit Tags
+ >)}
Page Flags
{showPageFlagList()}
@@ -304,6 +307,7 @@ const ContextMenu = ({
setEditTagModalOpen={setEditTagModalOpen}
setOpenContextPopup={setOpenContextPopup}
setNewDivision={setDivisionModalTagValue}
+ comparePersonalAttributes={comparePersonalAttributes}
curPersonalAttributes={curPersonalAttributes}
setNewPersonalAttributes={setNewPersonalAttributes}
updatePersonalAttributes={updatePersonalAttributes}
diff --git a/web/src/components/FOI/Home/CreateResponsePDF/CreateResponsePDF.js b/web/src/components/FOI/Home/CreateResponsePDF/CreateResponsePDF.js
index e1d6feff3..5bbfa33e0 100644
--- a/web/src/components/FOI/Home/CreateResponsePDF/CreateResponsePDF.js
+++ b/web/src/components/FOI/Home/CreateResponsePDF/CreateResponsePDF.js
@@ -59,6 +59,21 @@ export const createFinalPackageSelection = (document, enableSave) => {
return finalPackageBtn;
};
+export const createConsultPackageSelection = (document, enableSave) => {
+ const consultPackageButton = document.createElement("button");
+ consultPackageButton.textContent = "Consult Public Body";
+ consultPackageButton.id = "consult_package";
+ consultPackageButton.className = "consult_package";
+ consultPackageButton.style.backgroundColor = "transparent";
+ consultPackageButton.style.border = "none";
+ consultPackageButton.style.padding = "8px 8px 8px 10px";
+ consultPackageButton.style.cursor = "pointer";
+ consultPackageButton.style.alignItems = "left";
+ consultPackageButton.disabled = !enableSave;
+
+ return consultPackageButton;
+}
+
export const renderCustomButton = (document, menu) => {
const menuBtn = document.createElement("button");
menuBtn.textContent = "Create Response PDF";
@@ -124,36 +139,80 @@ export const handleRedlineForOipcClick = (
export const handleFinalPackageClick = (
updateModalData,
- setRedlineModalOpen
+ setRedlineModalOpen,
+ outstandingBalance,
+ isBalanceFeeOverrode,
+ setOutstandingBalanceModal,
+ setIsOverride
+) => {
+
+ if(outstandingBalance > 0 && !isBalanceFeeOverrode){
+ updateModalData({
+ modalFor: "responsepackage",
+ modalTitle: "Create Package for Applicant",
+ modalMessage: [
+ "There is an outstanding balance of fees. Cancel to resolve it, or click Override to proceed.",
+ ],
+ modalButtonLabel: "Override"
+ });
+ setOutstandingBalanceModal(true);
+ setIsOverride(false)
+ }
+ else{
+ // Download
+ updateModalData({
+ modalFor: "responsepackage",
+ modalTitle: "Create Package for Applicant",
+ modalMessage: [
+ "This should only be done when all redactions are finalized and ready to ",
+
+ be
+ ,
+ " sent to the ",
+
+ Applicant
+ ,
+ ". This will ",
+
+ permanently
+ ,
+ " apply the redactions and automatically create page stamps.",
+ ,
+ ,
+
+ When you create the response package, your web browser page
+ will automatically refresh
+ ,
+ ],
+ modalButtonLabel: "Create Applicant Package"
+ });
+ setRedlineModalOpen(true);
+ }
+};
+
+export const handleConsultPackageClick = (
+ updateModalData,
+ setRedlineModalOpen,
+ setIncludeDuplicatePages,
+ setIncludeNRPages
) => {
updateModalData({
- modalFor: "responsepackage",
- modalTitle: "Create Package for Applicant",
+ modalFor: "consult",
+ modalTitle: "Consult Public Body",
modalMessage: [
- "This should only be done when all redactions are finalized and ready to ",
-
- be
- ,
- " sent to the ",
-
- Applicant
- ,
- ". This will ",
-
- permanently
- ,
- " apply the redactions and automatically create page stamps.",
- ,
- ,
-
- When you create the response package, your web browser page
- will automatically refresh
- ,
- ],
- modalButtonLabel: "Create Applicant Package"
+ "Are you sure you want to create a consult package? A PDF will be created for each public body selected, and your web browser will automatically refresh after package creation.",
+ ,
+ ,
+
+ Select one or more public bodies you wish to create a consult package for:
+ ,
+ ],
+ modalButtonLabel: "Create Consult"
});
+ setIncludeDuplicatePages(true);
+ setIncludeNRPages(true);
setRedlineModalOpen(true);
-};
+}
export const isReadyForSignOff = (documentList, pageFlags) => {
let pageFlagArray = [];
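handleFinalPackageClick now gates the applicant package behind the outstanding-fee check. The decision reduced to a sketch (parameter names match the function above; the modal contents stay as in the patch):

    // Sketch of the gating added to handleFinalPackageClick: an unpaid balance
    // that has not been overridden routes to the override confirmation first;
    // otherwise the normal "Create Package for Applicant" modal opens.
    const gateFinalPackage = (
      outstandingBalance: number,
      isBalanceFeeOverrode: boolean
    ): "override" | "createPackage" =>
      outstandingBalance > 0 && !isBalanceFeeOverrode ? "override" : "createPackage";

In the patch, the "override" branch opens the outstanding-balance modal with the Override button, and the other branch opens the redline modal with Create Applicant Package.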
diff --git a/web/src/components/FOI/Home/CreateResponsePDF/useSaveRedlineForSignOff.js b/web/src/components/FOI/Home/CreateResponsePDF/useSaveRedlineForSignOff.js
index 821ebf97f..0ad2cee9d 100644
--- a/web/src/components/FOI/Home/CreateResponsePDF/useSaveRedlineForSignOff.js
+++ b/web/src/components/FOI/Home/CreateResponsePDF/useSaveRedlineForSignOff.js
@@ -34,6 +34,7 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
const requestnumber = useAppSelector(
(state) => state.documents?.requestnumber
);
+ const allPublicBodies = useAppSelector((state) => state.documents?.allPublicBodies);
const toastId = React.useRef(null);
const { foiministryrequestid } = useParams();
@@ -67,7 +68,11 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
const [redlineCategory, setRedlineCategory] = useState(false);
const [filteredComments, setFilteredComments] = useState({});
const [alreadyStitchedList, setAlreadyStitchedList] = useState([]);
- const [redlineSinglePackage, setRedlineSinglePackage] = useState(null);
+ const [enableSavingConsults, setEnableSavingConsults] = useState(false);
+ const [selectedPublicBodyIDs, setSelectedPublicBodyIDs] = useState([]);
+ const [documentPublicBodies, setDocumentPublicBodies] = useState([]);
+ const [consultApplyRedactions, setConsultApplyRedactions] = useState(false);
+ const [consultApplyRedlines, setConsultApplyRedlines] = useState(false);
const requestInfo = useAppSelector((state) => state.documents?.requestinfo);
const requestType = requestInfo?.requesttype ? requestInfo.requesttype : "public";
@@ -84,9 +89,41 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
}
else {
for (let doc of divObj.documentlist) {
+ //page to pageFlag mappings logic used for consults
+ const pagePageFlagMappings = {};
+ for (let pageFlag of doc.pageFlag) {
+ if (pageFlag.page in pagePageFlagMappings) {
+ pagePageFlagMappings[pageFlag.page].push(pageFlag.flagid);
+ } else {
+ pagePageFlagMappings[pageFlag.page] = [pageFlag.flagid];
+ }
+ }
for (const flagInfo of doc.pageFlag) {
+ if (redlineCategory === "consult") {
+ const pageFlagsOnPage = pagePageFlagMappings[flagInfo.page];
+ for (let consult of doc.consult) {
+ if ((consult.page === flagInfo.page && consult.programareaid.includes(divObj.divisionid)) || (consult.page === flagInfo.page && consult.other.includes(divObj.divisionname))) {
+ if (
+ (
+ flagInfo.flagid !== pageFlagTypes["Duplicate"] && flagInfo.flagid !== pageFlagTypes["Not Responsive"]) ||
+ (
+ (includeDuplicatePages && flagInfo.flagid === pageFlagTypes["Duplicate"]) ||
+ (includeNRPages && flagInfo.flagid === pageFlagTypes["Not Responsive"])
+ )
+ ) {
+ if(isvalid === false) {
+ if ((!includeDuplicatePages && pageFlagsOnPage.some((flagId) => flagId === pageFlagTypes["Duplicate"])) || (!includeNRPages && pageFlagsOnPage.some((flagId) => flagId === pageFlagTypes["Not Responsive"]))) {
+ isvalid = false;
+ } else {
+ isvalid = true;
+ }
+ }
+ }
+ }
+ }
+ }
// Added condition to handle Duplicate/NR clicked for Redline for Sign off Modal
- if (
+ else if (
(flagInfo.flagid !== pageFlagTypes["Duplicate"] && flagInfo.flagid != pageFlagTypes["Not Responsive"]) ||
(
(includeDuplicatePages && flagInfo.flagid === pageFlagTypes["Duplicate"]) ||
@@ -154,21 +191,68 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
incompatibleFiles
) => {
let newDocList = [];
- for (let div of divisions) {
- let divDocList = documentList?.filter((doc) =>
- doc.divisions.map((d) => d.divisionid).includes(div.divisionid)
- );
- // sort based on sortorder as the sortorder added based on the LastModified
- divDocList = sortBySortOrder(divDocList);
- let incompatableList = incompatibleFiles.filter((doc) =>
- doc.divisions.map((d) => d.divisionid).includes(div.divisionid)
- );
- newDocList.push({
- divisionid: div.divisionid,
- divisionname: div.name,
- documentlist: divDocList,
- incompatableList: incompatableList,
- });
+ if (redlineCategory === "redline" || redlineCategory === "oipcreview") {
+ for (let div of divisions) {
+ let divDocList = documentList?.filter((doc) =>
+ doc.divisions.map((d) => d.divisionid).includes(div.divisionid)
+ );
+
+ // sort based on sortorder as the sortorder added based on the LastModified
+ divDocList = sortBySortOrder(divDocList);
+
+ let incompatableList = incompatibleFiles.filter((doc) =>
+ doc.divisions.map((d) => d.divisionid).includes(div.divisionid)
+ );
+ newDocList.push({
+ divisionid: div.divisionid,
+ divisionname: div.name,
+ documentlist: divDocList,
+ incompatableList: incompatableList,
+ });
+ }
+ } else if (redlineCategory === "consult") {
+ // map documents to public bodies and custom public bodies (treated as Divisions) for the consult package.
+ // Consult Package logic treats public bodies (program areas + custom consults) as DIVISIONS and reuses the existing division mapping + redline logic to generate the consult package
+ for (let publicBodyId of divisions) {
+ let publicBodyDocList = [];
+ documentList.forEach((doc) => {
+ let programareaids = new Set();
+ if (doc.consult && doc.consult.length) {
+ doc.consult.forEach((consult) => {
+ consult.programareaid.forEach((programareaid) => {
+ if (programareaid === publicBodyId) {
+ programareaids.add(programareaid);
+ }
+ })
+ });
+ for (let consult of doc.consult) {
+ for (let customPublicBody of consult.other) {
+ if (customPublicBody === publicBodyId) {
+ programareaids.add(customPublicBody);
+ }
+ }
+ }
+ }
+ for (let programareaid of programareaids) {
+ if (programareaid === publicBodyId) {
+ publicBodyDocList.push({...doc})
+ }
+ }
+ })
+ publicBodyDocList = sortBySortOrder(publicBodyDocList);
+
+ let incompatableList = [];
+
+ // Custom public bodies/consults do not exist in allPublicBodies data (BE program area data) and are stored as simple strings with pageflag data (in other array attribute).
+ // Therefore, if publicBodyInfo cannot be found in allPublicBodies, the public body is a custom one and we create its 'division' data in the FE with a random unique id (Math.floor(Math.random() * 100000)) and its publicBodyID (its name as a string) for consult package creation purposes
+ const publicBodyInfo = allPublicBodies.find((body) => body.programareaid === publicBodyId);
+ newDocList.push({
+ divisionid: publicBodyInfo ? publicBodyInfo.programareaid : Math.floor(Math.random() * 100000),
+ divisionname: publicBodyInfo ? publicBodyInfo.name : publicBodyId,
+ documentlist: publicBodyDocList,
+ incompatableList: incompatableList,
+ })
+ }
}
return newDocList;
};
@@ -186,6 +270,8 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
}
// sort based on sortorder as the sortorder added based on the LastModified
prepareRedlinePageMappingByRequest(sortBySortOrder(reqdocuments), pageMappedDocs);
+ } else if (redlineCategory === "consult") {
+ prepareRedlinePageMappingByConsult(divisionDocuments);
} else {
prepareRedlinePageMappingByDivision(divisionDocuments);
}
@@ -297,7 +383,6 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
});
};
-
const prepareRedlinePageMappingByDivision = (divisionDocuments) => {
let removepages = {};
let pageMappings = {};
@@ -415,11 +500,197 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
});
}
+ const prepareRedlinePageMappingByConsult = (divisionDocuments) => {
+ let removepages = {};
+ let pageMappings = {};
+ let divPageMappings = {};
+ let pagesToRemove = [];
+ let totalPageCount = 0;
+ let totalPageCountIncludeRemoved = 0;
+ for (let divObj of divisionDocuments) {
+ // sort based on sortorder as the sortorder added based on the LastModified
+ for (let doc of sortBySortOrder(divObj.documentlist)) {
+ if (doc.pagecount > 0) {
+ let pagesToRemoveEachDoc = [];
+ pageMappings[doc.documentid] = {};
+ let pageIndex = 1;
+ //gather pages that need to be removed
+ doc.pageFlag.sort((a, b) => a.page - b.page); //sort pageflag by page #
+ let skipDocumentPages = false;
+ let skipOnlyDuplicateDocument = false;
+ let skipOnlyNRDocument = false;
+ if (!includeDuplicatePages && !includeNRPages) {
+ skipDocumentPages = skipDocument(doc.pageFlag, doc.pagecount, pageFlagTypes);
+ }
+ else if (!includeDuplicatePages) {
+ skipOnlyDuplicateDocument = skipDuplicateDocument(doc.pageFlag, doc.pagecount, pageFlagTypes);
+ }
+ else if (!includeNRPages) {
+ skipOnlyNRDocument = skipNRDocument(doc.pageFlag, doc.pagecount, pageFlagTypes);
+ }
+
+ // for consults, go through all pages
+ for (const page of doc.pages) {
+ //find pageflags for this page
+ const pageFlagsOnPage = doc.pageFlag.filter((pageFlag) => {
+ return pageFlag.page === page;
+ })
+ const notConsultPageFlagsOnPage = pageFlagsOnPage.filter((pageFlag) => {
+ return pageFlag.flagid !== pageFlagTypes["Consult"];
+ })
+
+ // if the page has no pageflags, remove it
+ if (pageFlagsOnPage.length == 0) {
+ pagesToRemoveEachDoc.push(page);
+ if (!skipDocumentPages) {
+ pagesToRemove.push(
+ pageIndex + totalPageCountIncludeRemoved
+ );
+ }
+ pageIndex ++;
+ }
+
+ //differences in pagemapping for consults begin here
+ //for pages with only consult flags, remove if page doesn't belong to current consult body
+ if (pageFlagsOnPage.length > 0 && notConsultPageFlagsOnPage.length == 0) {
+ for (let flagInfo of pageFlagsOnPage) {
+ let hasConsult = false;
+ for (let consult of doc.consult) {
+ if ((consult.page === flagInfo.page && consult.programareaid.includes(divObj.divisionid)) || (consult.page === flagInfo.page && consult.other.includes(divObj.divisionname))) {
+ hasConsult = true;
+ break;
+ }
+ }
+ if (!hasConsult) {
+ if (!pagesToRemoveEachDoc.includes(flagInfo.page)) {
+ pagesToRemoveEachDoc.push(flagInfo.page);
+ if(!skipDocumentPages) {
+ delete pageMappings[doc.documentid][flagInfo.page];
+ pagesToRemove.push(pageIndex + totalPageCountIncludeRemoved)
+ }
+ }
+ } else {
+ // add page as it will match the curent publicBody / division id
+ pageMappings[doc.documentid][flagInfo.page] =
+ pageIndex +
+ totalPageCount -
+ pagesToRemoveEachDoc.length;
+ }
+ }
+ pageIndex ++;
+ }
+
+ // if the page does have pageflags, process it
+ for (let flagInfo of notConsultPageFlagsOnPage) {
+ if (flagInfo.flagid == pageFlagTypes["Duplicate"]) {
+ if(includeDuplicatePages) {
+ for (let consult of doc.consult) {
+ if ((consult.page === flagInfo.page && consult.programareaid.includes(divObj.divisionid)) || (consult.page === flagInfo.page && consult.other.includes(divObj.divisionname))) {
+ pageMappings[doc.documentid][flagInfo.page] =
+ pageIndex +
+ totalPageCount -
+ pagesToRemoveEachDoc.length;
+ }
+ }
+ } else {
+ pagesToRemoveEachDoc.push(flagInfo.page);
+ if (!skipDocumentPages && !skipOnlyDuplicateDocument) {
+ pagesToRemove.push(
+ pageIndex + totalPageCountIncludeRemoved
+ );
+ }
+ }
+
+ } else if (flagInfo.flagid == pageFlagTypes["Not Responsive"]) {
+ if(includeNRPages) {
+ for (let consult of doc.consult) {
+ if ((consult.page === flagInfo.page && consult.programareaid.includes(divObj.divisionid)) || (consult.page === flagInfo.page && consult.other.includes(divObj.divisionname))) {
+ pageMappings[doc.documentid][flagInfo.page] =
+ pageIndex +
+ totalPageCount -
+ pagesToRemoveEachDoc.length;
+ }
+ }
+ } else {
+ pagesToRemoveEachDoc.push(flagInfo.page);
+ if (!skipDocumentPages && !skipOnlyNRDocument) {
+ pagesToRemove.push(
+ pageIndex + totalPageCountIncludeRemoved
+ );
+ }
+ }
+ } else if (flagInfo.flagid == pageFlagTypes["In Progress"]) {
+ for (let consult of doc.consult) {
+ if ((consult.page === flagInfo.page && consult.programareaid.includes(divObj.divisionid)) || (consult.page === flagInfo.page && consult.other.includes(divObj.divisionname))) {
+ pageMappings[doc.documentid][flagInfo.page] =
+ pageIndex +
+ totalPageCount -
+ pagesToRemoveEachDoc.length;
+ }
+ }
+ } else {
+ if (flagInfo.flagid !== pageFlagTypes["Consult"]) {
+ pageMappings[doc.documentid][flagInfo.page] =
+ pageIndex +
+ totalPageCount -
+ pagesToRemoveEachDoc.length;
+ }
+ }
+
+ // Check if the page has a relevant consult flag; if not, remove the page
+ let hasConsult = false;
+ for (let consult of doc.consult) {
+ if ((consult.page === flagInfo.page && consult.programareaid.includes(divObj.divisionid)) || (consult.page === flagInfo.page && consult.other.includes(divObj.divisionname))) {
+ hasConsult = true;
+ break;
+ }
+ }
+ if (!hasConsult) {
+ if (!pagesToRemoveEachDoc.includes(flagInfo.page)) {
+ pagesToRemoveEachDoc.push(flagInfo.page);
+ if(!skipDocumentPages) {
+ delete pageMappings[doc.documentid][flagInfo.page];
+ pagesToRemove.push(pageIndex + totalPageCountIncludeRemoved)
+ }
+ }
+ }
+ if (flagInfo.flagid !== pageFlagTypes["Consult"]) {
+ pageIndex ++;
+ }
+ }
+ }
+ //End of pageMappingsByConsults
+
+ totalPageCount += Object.keys(
+ pageMappings[doc.documentid]
+ ).length;
+ if (!skipDocumentPages && !skipOnlyDuplicateDocument && !skipOnlyNRDocument) {
+ totalPageCountIncludeRemoved += doc.pagecount;
+ }
+ }
+ }
+ divPageMappings[divObj.divisionid] = pageMappings;
+ removepages[divObj.divisionid] = pagesToRemove;
+ pagesToRemove = [];
+ totalPageCount = 0;
+ totalPageCountIncludeRemoved = 0;
+ pageMappings = {}
+ }
+
+ setRedlinepageMappings({
+ 'divpagemappings': divPageMappings,
+ 'pagemapping': pageMappings,
+ 'pagestoremove': removepages
+ });
+ }
const prepareRedlineIncompatibleMapping = (redlineAPIResponse) => {
let divIncompatableMapping = {};
let incompatibleFiles = [];
let divCounter = 0;
+ if (redlineAPIResponse.consultdocumentlist) {
+ redlineAPIResponse.divdocumentList = redlineAPIResponse.consultdocumentlist
+ }
for (let divObj of redlineAPIResponse.divdocumentList) {
divCounter++;
@@ -444,6 +715,7 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
});
incompatibleFiles = incompatibleFiles.concat(divIncompatableFiles);
}
+ if (divObj.publicBody && !divObj.divisionid) divObj.divisionid = divObj.publicBody;
if (redlineAPIResponse.issingleredlinepackage == "Y") {
if (divCounter == redlineAPIResponse.divdocumentList.length) {
incompatableObj["divisionid"] = "0";
@@ -481,13 +753,15 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
}
};
const getzipredlinecategory = (layertype) => {
+ if (redlineCategory === "consult") {
+ return "consultpackage";
+ }
if (currentLayer.name.toLowerCase() === "oipc") {
return layertype === "oipcreview" ? "oipcreviewredline" : "oipcredline";
}
return "redline";
};
-
const prepareredlinesummarylist = (stitchDocuments) => {
let summarylist = [];
let alldocuments = [];
@@ -563,7 +837,7 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
loadAsPDF: true,
useDownloader: false, // Added to fix BLANK page issue
}).then(async (docObj) => {
- applyRotations(docObj, doc.attributes.rotatedpages)
+ applyRotations(docObj, doc.attributes.rotatedpages);
//if (isIgnoredDocument(doc, docObj.getPageCount(), divisionDocuments) == false) {
docCountCopy++;
docCount++;
@@ -595,7 +869,6 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
if (docCount == documentlist.length && redlineSinglePkg == "N" ) {
requestStitchObject[division] = stitchedDocObj;
}
-
}
} else {
if (incompatableList[division]["incompatibleFiles"].length > 0) {
@@ -614,12 +887,6 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
}
};
-
-
-
-
-
-
const stitchSingleDivisionRedlineExport = async (
_instance,
divisionDocuments,
@@ -739,7 +1006,48 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
});
return sortedList;
};
-
+ const getPublicBodyList = (documentList) => {
+ let publicBodyIdList = [];
+ if (documentList?.length > 0) {
+ for (const doc of documentList) {
+ if ('pageFlag' in doc) {
+ for (let pageflag of doc['pageFlag']) {
+ if ('programareaid' in pageflag) {
+ for (let programareaid of pageflag['programareaid']) {
+ publicBodyIdList.push(programareaid);
+ }
+ }
+ // Logic to include custom consults/public bodies as they are stored in another array (other) and not with programareaids
+ if ('other' in pageflag) {
+ for (let customPublicBody of pageflag['other']) {
+ publicBodyIdList.push(customPublicBody);
+ }
+ }
+ }
+ }
+ }
+ const filteredPublicBodyIdList = [...new Set(publicBodyIdList)];
+ return getPublicBodyObjs(filteredPublicBodyIdList);
+ }
+ // no documents, so there are no public bodies to consult
+ return [];
+ }
+ const getPublicBodyObjs = (publicBodyIDList) => {
+ const publicBodies = [];
+ for (let publicBodyId of publicBodyIDList) {
+ const publicBody = allPublicBodies.find(publicBody => publicBody.programareaid === publicBodyId);
+ if (publicBody) {
+ publicBodies.push(publicBody);
+ } else {
+ // Custom public bodies/consults will not exist in allPublicBodies data (BE data for program areas) as they are not stored in the BE as program areas (but rather as basic pageflags)
+ const customPublicBody = {
+ name: publicBodyId,
+ programareaid: null,
+ iaocode: publicBodyId
+ };
+ publicBodies.push(customPublicBody);
+ }
+ }
+ return publicBodies;
+ }
const saveRedlineDocument = async (
_instance,
@@ -756,7 +1064,13 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
});
const divisionFilesList = [...documentList, ...incompatibleFiles];
- const divisions = getDivisionsForSaveRedline(divisionFilesList);
+ let divisions;
+ if (redlineCategory === "consult") {
+ // Key consult logic: reuses the preexisting division redline logic for consults
+ divisions = selectedPublicBodyIDs;
+ } else {
+ divisions = getDivisionsForSaveRedline(divisionFilesList);
+ }
const divisionDocuments = getDivisionDocumentMappingForRedline(divisions, documentList, incompatibleFiles);
const documentids = documentList.map((obj) => obj.documentid);
getFOIS3DocumentRedlinePreSignedUrl(
@@ -769,10 +1083,9 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
render: `Start saving redline...`,
isLoading: true,
});
- //setRedlineSinglePackage(res.issingleredlinepackage);
setIsSingleRedlinePackage(res.issingleredlinepackage);
let stitchDoc = {};
-
+
prepareRedlinePageMapping(
res['divdocumentList'],
res.issingleredlinepackage,
@@ -786,7 +1099,6 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
let documentsObjArr = [];
let divisionstitchpages = [];
let divCount = 0;
-
for (let div of res.divdocumentList) {
divCount++;
let docCount = 0;
@@ -807,9 +1119,29 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
skipOnlyNRDocument = skipNRDocument(doc.pageFlag, doc.pagecount, pageFlagTypes);
}
if (pageMappedDocs != undefined) {
- let divisionsdocpages = Object.values(
- pageMappedDocs.redlineDocIdLookup
- )
+ let divisionsdocpages = [];
+ // for consults, no need to filter by division/consult
+ if (redlineCategory === "consult") {
+ Object.values(
+ pageMappedDocs.redlineDocIdLookup
+ )
+ .forEach((obj) => {
+ divisionsdocpages = Object.values(
+ pageMappedDocs.redlineDocIdLookup
+ )
+ .filter((obj) => {
+ return obj.docId == doc.documentid;
+ })
+ .map((obj) => {
+ if (res.issingleredlinepackage == "Y" || (!skipDocumentPages && !skipOnlyDuplicateDocument && !skipOnlyNRDocument)) {
+ return obj.pageMappings;
+ }
+ });
+ })
+ } else {
+ divisionsdocpages = Object.values(
+ pageMappedDocs.redlineDocIdLookup
+ )
.filter((obj) => {
return obj.division.includes(div.divisionid) && obj.docId == doc.documentid;
})
@@ -818,6 +1150,7 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
return obj.pageMappings;
}
});
+ }
if (divisionsdocpages[0]) {
divisionsdocpages.forEach(function (_arr) {
_arr.forEach(function (value) {
@@ -833,9 +1166,6 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
);
}
}
- // if (docCount == div.documentlist.length) {
-
- // }
}
}
if (
@@ -891,7 +1221,8 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
requestnumber: res.requestnumber,
bcgovcode: res.bcgovcode,
summarydocuments: prepareredlinesummarylist(stitchDocuments),
- redactionlayerid: currentLayer.redactionlayerid
+ redactionlayerid: currentLayer.redactionlayerid,
+ requesttype: requestType
});
if (res.issingleredlinepackage === "Y") {
stitchSingleDivisionRedlineExport(
@@ -920,7 +1251,6 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
);
};
-
const checkSavingRedline = (redlineReadyAndValid, instance) => {
const validRedlineStatus = [
RequestStates["Records Review"],
@@ -934,7 +1264,6 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
!redlineReadyAndValid || !validRedlineStatus;
}
};
-
const checkSavingOIPCRedline = (
oipcRedlineReadyAndValid,
instance,
@@ -955,6 +1284,17 @@ const useSaveRedlineForSignoff = (initDocInstance, initDocViewer) => {
!readyForSignOff;
}
};
+ const checkSavingConsults = (documentList, instance) => {
+ const publicBodyList = getPublicBodyList(documentList);
+ setDocumentPublicBodies(publicBodyList);
+ setEnableSavingConsults(
+ publicBodyList.length > 0
+ );
+ if (instance) {
+ const document = instance.UI.iframeWindow.document;
+ document.getElementById("consult_package").disabled = !(publicBodyList.length > 0);
+ }
+ }
const triggerRedlineZipper = (
divObj,
stitchedDocPath,
@@ -1336,10 +1676,13 @@ const stampPageNumberRedline = async (
.replace(/"/g, '"')
.replace(/\\/g, "");
let sections = getAnnotationSections(annot);
- if (sections.some((item) => item.section === "s. 14")) {
+ if (redlineCategory === "oipcreview" && sections.some((item) => item.section === "s. 14")) {
sectionStamps[parentRedactionId] =
annotationpagenumbers[parentRedactionId];
}
+ if (redlineCategory === "consult" && sections.some(item => item.section === 'NR')) {
+ sectionStamps[parentRedactionId] = annotationpagenumbers[parentRedactionId];
+ }
}
}
}
@@ -1406,17 +1749,27 @@ const stampPageNumberRedline = async (
const downloadType = "pdf";
let currentDivisionCount = 0;
const divisionCountForToast = Object.keys(redlineStitchObject).length;
+
+ //Consult Package page removal logic
+ let pagesOfEachDivisions = {};
+ //get page numbers of each division
+ Object.keys(redlineStitchInfo).forEach((_div) => {
+ pagesOfEachDivisions[_div] = [];
+ redlineStitchInfo[_div]["stitchpages"].forEach((pageinfo) => {
+ pagesOfEachDivisions[_div].push(pageinfo["stitchedPageNo"]);
+ });
+ });
+
for (const [key, value] of Object.entries(redlineStitchObject)) {
currentDivisionCount++;
toast.update(toastId.current, {
render:
- redlineSinglePackage == "N"
- ? `Saving redline PDF for ${divisionCountForToast} divisions to Object Storage...`
+ isSingleRedlinePackage == "N"
+ ? `Saving redline PDF for ${divisionCountForToast} ${redlineCategory === "consult" ? "consultees" : "divisions"} to Object Storage...`
: `Saving redline PDF to Object Storage...`,
isLoading: true,
autoClose: 5000,
});
-
let divisionid = key;
let stitchObject = redlineStitchObject[key];
if (stitchObject == null) {
@@ -1432,14 +1785,13 @@ const stampPageNumberRedline = async (
redlinepageMappings["divpagemappings"][divisionid],
redlineStitchInfo[divisionid]["documentids"]
);
- if(redlineCategory !== "oipcreview") {
+ if (redlineCategory === "redline") {
await stampPageNumberRedline(
- stitchObject,
- PDFNet,
- redlineStitchInfo[divisionid]["stitchpages"],
- isSingleRedlinePackage
+ stitchObject,
+ PDFNet,
+ redlineStitchInfo[divisionid]["stitchpages"],
+ isSingleRedlinePackage
);
- }
if (
redlinepageMappings["pagestoremove"][divisionid] &&
redlinepageMappings["pagestoremove"][divisionid].length > 0 &&
@@ -1449,7 +1801,6 @@ const stampPageNumberRedline = async (
redlinepageMappings["pagestoremove"][divisionid]
);
}
- if (redlineCategory === "redline") {
await addWatermarkToRedline(
stitchObject,
redlineWatermarkPageMapping,
@@ -1457,61 +1808,246 @@ const stampPageNumberRedline = async (
);
}
- let string = await stitchObject.extractXFDF()
+ let string = await stitchObject.extractXFDF();
+ // for redline - formatted annots
let xmlObj = parser.parseFromString(string.xfdfString);
let annots = parser.parseFromString('' + formattedAnnotationXML + ' ');
- let annotsObj = xmlObj.getElementsByTagName('annots')
+ let annotsObj = xmlObj.getElementsByTagName('annots');
if (annotsObj.length > 0) {
- annotsObj[0].children = annotsObj[0].children.concat(annots.children)
+ annotsObj[0].children = annotsObj[0].children.concat(annots.children);
} else {
- xmlObj.children.push(annots)
- }
-
+ xmlObj.children.push(annots);
+ }
let xfdfString = parser.toString(xmlObj);
- //OIPC - Special Block (Redact S.14) : Begin
- if(redlineCategory === "oipcreview") {
- const rarr = [];
- let annotationManager = docInstance?.Core.annotationManager;
- let s14_sectionStamps = await annotationSectionsMapping(xfdfString, formattedAnnotationXML);
- let rects = [];
- for (const [key, value] of Object.entries(s14_sectionStamps)) {
- let s14annoation = annotationManager.getAnnotationById(key);
- if ( s14annoation.Subject === "Redact") {
- rects = rects.concat(
- s14annoation.getQuads().map((q) => {
- return {
- pageno: s14_sectionStamps[key],
- recto: q.toRect(),
- vpageno: s14annoation.getPageNumber()
- };
- })
+ // for oipc review - re-apply annots after redaction - annots only no widgets/form fields
+ let xmlObj1 = parser.parseFromString(string.xfdfString);
+ xmlObj1.children = [];
+ xmlObj1.children.push(annots);
+ let xfdfString1 = parser.toString(xmlObj1);
+
+ //Apply Redactions (if any)
+ //OIPC - Special Block (Redact S.14) : Begin
+ if(redlineCategory === "oipcreview") {
+ let annotationManager = docInstance?.Core.annotationManager;
+ let s14_sectionStamps = await annotationSectionsMapping(xfdfString, formattedAnnotationXML);
+ let s14annots = [];
+ for (const [key, value] of Object.entries(s14_sectionStamps)) {
+ let s14annoation = annotationManager.getAnnotationById(key);
+ if ( s14annoation.Subject === "Redact") {
+ s14annots.push(s14annoation);
+ }
+ }
+
+ let doc = docViewer.getDocument();
+ await annotationManager.applyRedactions(s14annots);
+
+ /** apply redactions and save to s3 - xfdfString1 is needed to display
+ * the freetext (section name) on the downloaded file. */
+ doc
+ .getFileData({
+ // export the document to arraybuffer
+ // xfdfString: xfdfString,
+ downloadType: downloadType,
+ flatten: true,
+ })
+ .then(async (_data) => {
+ const _arr = new Uint8Array(_data);
+ const _blob = new Blob([_arr], { type: "application/pdf" });
+
+ await docInstance?.Core.createDocument(_data, {
+ loadAsPDF: true,
+ useDownloader: false, // Added to fix BLANK page issue
+ }).then( async (docObj) => {
+
+ /**must apply redactions before removing pages*/
+ if (redlinepageMappings["pagestoremove"][divisionid].length > 0) {
+ await docObj.removePages(redlinepageMappings["pagestoremove"][divisionid]);
+ }
+
+ await stampPageNumberRedline(
+ docObj,
+ PDFNet,
+ redlineStitchInfo[divisionid]["stitchpages"],
+ isSingleRedlinePackage
+ );
+
+ docObj.getFileData({
+ // saves the document with annotations in it
+ xfdfString: xfdfString1,
+ downloadType: downloadType,
+ flatten: true,
+ })
+ .then(async (__data) => {
+ const __arr = new Uint8Array(__data);
+ const __blob = new Blob([__arr], { type: "application/pdf" });
+
+ saveFilesinS3(
+ { filepath: redlineStitchInfo[divisionid]["s3path"] },
+ __blob,
+ (_res) => {
+ // ######### call another process for zipping and generate download here ##########
+ toast.update(toastId.current, {
+ render: `Redline PDF saved to Object Storage`,
+ type: "success",
+ className: "file-upload-toast",
+ isLoading: false,
+ autoClose: 3000,
+ hideProgressBar: true,
+ closeOnClick: true,
+ pauseOnHover: true,
+ draggable: true,
+ closeButton: true,
+ });
+ triggerRedlineZipper(
+ redlineIncompatabileMappings[divisionid],
+ redlineStitchInfo[divisionid]["s3path"],
+ divisionCountForToast,
+ isSingleRedlinePackage
);
- }
-
-
- }
- for (const rect of rects) {
- let height = docViewer.getPageHeight(rect.vpageno);
- rarr.push(await PDFNet.Redactor.redactionCreate(rect.pageno, (await PDFNet.Rect.init(rect.recto.x1,height-rect.recto.y1,rect.recto.x2,height-rect.recto.y2)), false, ''));
+ },
+ (_err) => {
+ console.log(_err);
+ toast.update(toastId.current, {
+ render: "Failed to save redline pdf to Object Storage",
+ type: "error",
+ className: "file-upload-toast",
+ isLoading: false,
+ autoClose: 3000,
+ hideProgressBar: true,
+ closeOnClick: true,
+ pauseOnHover: true,
+ draggable: true,
+ closeButton: true,
+ });
+ }
+ );
+ });
+ });
+ });
}
- if (rarr.length > 0) {
- const app = {};
- app.redaction_overlay = true;
- app.border = false;
- app.show_redacted_content_regions = false;
- const doc = await stitchObject.getPDFDoc();
- await PDFNet.Redactor.redact(doc, rarr, app);
+ //OIPC - Special Block : End
+ //Consults - Redlines + Redactions (Redact S.NR) Block : Start
+ else if (redlineCategory === "consult") {
+ let doc = docViewer.getDocument();
+ if (!consultApplyRedlines) {
+ const publicbodyAnnotList = xmlObj1.getElementsByTagName('annots')[0]['children'];
+ const filteredPublicbodyAnnotList = publicbodyAnnotList.filter((annot) => {
+ return annot.name !== "freetext" && annot.name !== 'redact'
+ });
+ xmlObj1.getElementsByTagName('annots')[0].children = filteredPublicbodyAnnotList;
+ xfdfString1 = parser.toString(xmlObj1);
+ }
+ if (consultApplyRedactions) {
+ let annotationManager = docInstance?.Core.annotationManager;
+ let nr_sectionStamps = await annotationSectionsMapping(xfdfString, formattedAnnotationXML);
+ let nrAnnots = [];
+ for (const [key, value] of Object.entries(nr_sectionStamps)) {
+ let nrAnnotation = annotationManager.getAnnotationById(key);
+ if (nrAnnotation.Subject === "Redact") {
+ nrAnnots.push(nrAnnotation);
+ }
+ }
+ await annotationManager.applyRedactions(nrAnnots);
+ }
+ /** apply redactions and save to s3 - xfdfString1 is needed to display
+ * the freetext (section name) on the downloaded file. */
+ doc
+ .getFileData({
+ // export the document to arraybuffer
+ downloadType: downloadType,
+ flatten: true,
+ })
+ .then(async (_data) => {
+ const _arr = new Uint8Array(_data);
+ const _blob = new Blob([_arr], { type: "application/pdf" });
+
+ await docInstance?.Core.createDocument(_data, {
+ loadAsPDF: true,
+ useDownloader: false, // Added to fix BLANK page issue
+ }).then( async (docObj) => {
+
+ // Consult Package: remove pages that are not in this division (consult/division-specific pages are left in docObj to be removed by redlinepageMappings pagestoremove below)
+ let pagesNotBelongsToThisDivision = [];
+ for(let i=1; i <= docObj.getPageCount(); i++) {
+ if(!pagesOfEachDivisions[key].includes(i))
+ pagesNotBelongsToThisDivision.push(i);
+ }
+
+ if(pagesNotBelongsToThisDivision.length > 0) {
+ await docObj.removePages(pagesNotBelongsToThisDivision);
+ }
+
+ await stampPageNumberRedline(
+ docObj,
+ PDFNet,
+ redlineStitchInfo[divisionid]["stitchpages"],
+ isSingleRedlinePackage
+ );
+
+ // Consult Package: remove pages/documents associated with this division/consult
+ /**must apply redactions before removing pages*/
+ if (redlinepageMappings["pagestoremove"][divisionid].length > 0) {
+ await docObj.removePages(redlinepageMappings["pagestoremove"][divisionid]);
+ }
+
+ docObj.getFileData({
+ // saves the document with annotations in it
+ xfdfString: xfdfString1,
+ downloadType: downloadType,
+ // flatten: true,
+ })
+ .then(async (__data) => {
+ const __arr = new Uint8Array(__data);
+ const __blob = new Blob([__arr], { type: "application/pdf" });
+
+ saveFilesinS3(
+ { filepath: redlineStitchInfo[divisionid]["s3path"] },
+ __blob,
+ (_res) => {
+ // ######### call another process for zipping and generate download here ##########
+ toast.update(toastId.current, {
+ render: `Consult PDF saved to Object Storage`,
+ type: "success",
+ className: "file-upload-toast",
+ isLoading: false,
+ autoClose: 3000,
+ hideProgressBar: true,
+ closeOnClick: true,
+ pauseOnHover: true,
+ draggable: true,
+ closeButton: true,
+ });
+ triggerRedlineZipper(
+ redlineIncompatabileMappings[divisionid],
+ redlineStitchInfo[divisionid]["s3path"],
+ divisionCountForToast,
+ isSingleRedlinePackage
+ );
+ },
+ (_err) => {
+ console.log(_err);
+ toast.update(toastId.current, {
+ render: "Failed to save redline pdf to Object Storage",
+ type: "error",
+ className: "file-upload-toast",
+ isLoading: false,
+ autoClose: 3000,
+ hideProgressBar: true,
+ closeOnClick: true,
+ pauseOnHover: true,
+ draggable: true,
+ closeButton: true,
+ });
+ }
+ );
+ });
+ });
+ });
}
- await stampPageNumberRedline(
- stitchObject,
- PDFNet,
- redlineStitchInfo[divisionid]["stitchpages"],
- redlineSinglePackage
- );
- }
- //OIPC - Special Block : End
+ //Consults - Redlines + Redactions (Redact S.NR) Block : End
+ else {
stitchObject
.getFileData({
// saves the document with annotations in it
@@ -1546,7 +2082,7 @@ const stampPageNumberRedline = async (
redlineIncompatabileMappings[divisionid],
redlineStitchInfo[divisionid]["s3path"],
divisionCountForToast,
- redlineSinglePackage
+ isSingleRedlinePackage
);
},
(_err) => {
@@ -1566,12 +2102,40 @@ const stampPageNumberRedline = async (
}
);
});
+ }
}
}
};
+
+ const getAdjustedRedactionCoordinates = async (pageRotation, recto, PDFNet, pageWidth, pageHeight) => {
+ let x1 = recto.x1;
+ let y1 = recto.y1;
+ let x2 = recto.x2;
+ let y2 = recto.y2;
+ // Adjust Y-coordinates to account for the flipped Y-axis in PDF
+ y1 = pageHeight - y1;
+ y2 = pageHeight - y2;
+ // Adjust for page rotation (90, 180, 270 degrees)
+ switch (pageRotation) {
+ case 90:
+ [x1, y1] = [y1, x1];
+ [x2, y2] = [y2, x2];
+ break;
+ case 180:
+ x1 = pageWidth - x1;
+ y1 = pageHeight - y1;
+ x2 = pageWidth - x2;
+ y2 = pageHeight - y2;
+ break;
+ case 270:
+ [x1, y1] = [pageHeight - y1, x1];
+ [x2, y2] = [pageHeight - y2, x2];
+ break;
+ }
+ return await PDFNet.Rect.init(x1, y1, x2, y2);
+ }
useEffect(() => {
-
if (
redlineStitchObject &&
redlineDocumentAnnotations &&
@@ -1582,7 +2146,6 @@ const stampPageNumberRedline = async (
}
}, [redlineDocumentAnnotations, redlineStitchObject, redlineStitchInfo]);
-
useEffect(() => {
if (
pdftronDocObjectsForRedline?.length > 0 &&
@@ -1652,11 +2215,20 @@ const stampPageNumberRedline = async (
saveRedlineDocument,
enableSavingOipcRedline,
enableSavingRedline,
+ enableSavingConsults,
checkSavingRedline,
checkSavingOIPCRedline,
+ checkSavingConsults,
setRedlineCategory,
setFilteredComments,
+ setSelectedPublicBodyIDs,
+ setConsultApplyRedactions,
+ selectedPublicBodyIDs,
+ documentPublicBodies,
+ consultApplyRedactions,
+ setConsultApplyRedlines,
+ consultApplyRedlines,
};
};
-export default useSaveRedlineForSignoff;
+export default useSaveRedlineForSignoff;
\ No newline at end of file
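Consult packages reuse the division-based redline pipeline by treating each selected public body as a division. The resolution done in getDivisionDocumentMappingForRedline and getPublicBodyObjs condenses to roughly this (a sketch; allPublicBodies comes from the Redux store, and custom consults are plain strings with no programareaid):

    // Sketch: resolve a selected public body id (a programareaid number, or the
    // free-text name of a custom consult) into the division-shaped record that
    // the existing redline stitching logic expects.
    type PublicBody = { programareaid: number | null; name: string; iaocode: string };

    const toConsultDivision = (
      publicBodyId: number | string,
      allPublicBodies: PublicBody[]
    ) => {
      const known = allPublicBodies.find((b) => b.programareaid === publicBodyId);
      return {
        // custom consults have no programareaid, so a throwaway unique id stands in
        divisionid: known ? known.programareaid : Math.floor(Math.random() * 100000),
        divisionname: known ? known.name : String(publicBodyId),
      };
    };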
diff --git a/web/src/components/FOI/Home/CreateResponsePDF/useSaveResponsePackage.js b/web/src/components/FOI/Home/CreateResponsePDF/useSaveResponsePackage.js
index 13e7b2d24..5569590ab 100644
--- a/web/src/components/FOI/Home/CreateResponsePDF/useSaveResponsePackage.js
+++ b/web/src/components/FOI/Home/CreateResponsePDF/useSaveResponsePackage.js
@@ -33,7 +33,11 @@ const useSaveResponsePackage = () => {
let _docmain = _docViwer.getDocument();
doc = await _docmain.getPDFDoc();
-
+
+ let docinfo = await doc.getDocInfo();
+ docinfo.setTitle(requestnumber + ".pdf");
+ docinfo.setAuthor("");
+
// Run PDFNet methods with memory management
await PDFNet.runWithCleanup(async () => {
// lock the document before a write operation
@@ -128,12 +132,12 @@ const useSaveResponsePackage = () => {
// return { sorteddocuments: sorteddocids, pkgdocuments: summarylist };
// };
- const prepareresponseredlinesummarylist = (documentlist, bcgovcode) => {
+ const prepareresponseredlinesummarylist = (documentlist, bcgovcode, requestType) => {
let summarylist = [];
let alldocuments = [];
console.log("\ndocumentlist:", documentlist);
let sorteddocids = [];
- if (bcgovcode?.toLowerCase() === 'mcf') {
+ if (bcgovcode?.toLowerCase() === 'mcf' && requestType == "personal") {
let labelGroups = {};
let alldocids = [];
@@ -212,7 +216,9 @@ const useSaveResponsePackage = () => {
_instance,
documentList,
pageMappedDocs,
- pageFlags
+ pageFlags,
+ feeOverrideReason,
+ requestType,
) => {
const downloadType = "pdf";
let zipServiceMessage = {
@@ -223,6 +229,8 @@ const useSaveResponsePackage = () => {
bcgovcode: "",
summarydocuments: {} ,
redactionlayerid: currentLayer.redactionlayerid,
+ pdfstitchjobattributes:{"feeoverridereason":""},
+ requesttype: requestType
};
getResponsePackagePreSignedUrl(
foiministryrequestid,
@@ -231,7 +239,8 @@ const useSaveResponsePackage = () => {
const toastID = toast.loading("Start generating final package...");
zipServiceMessage.requestnumber = res.requestnumber;
zipServiceMessage.bcgovcode = res.bcgovcode;
- zipServiceMessage.summarydocuments= prepareresponseredlinesummarylist(documentList,zipServiceMessage.bcgovcode)
+ zipServiceMessage.summarydocuments= prepareresponseredlinesummarylist(documentList,zipServiceMessage.bcgovcode, requestType)
+ zipServiceMessage.pdfstitchjobattributes= {"feeoverridereason":feeOverrideReason}
let annotList = annotationManager.getAnnotationsList();
annotationManager.ungroupAnnotations(annotList);
/** remove duplicate and not responsive pages */
@@ -258,9 +267,23 @@ const useSaveResponsePackage = () => {
/**must apply redactions before removing pages*/
if (pagesToRemove.length > 0) {
await doc.removePages(pagesToRemove);
- }
+ }
+ doc.setWatermark({
+ diagonal: {
+ text: ''
+ }
+ })
const { PDFNet } = _instance.Core;
PDFNet.initialize();
+
+ // remove bookmarks
+ var pdfdoc = await doc.getPDFDoc()
+ var bookmark = await pdfdoc.getFirstBookmark();
+ while (bookmark && await bookmark.isValid()) {
+ bookmark.delete();
+ bookmark = await pdfdoc.getFirstBookmark();
+ }
+
await stampPageNumberResponse(documentViewer, PDFNet);
toast.update(toastID, {
render: "Saving section stamps...",
@@ -291,6 +314,7 @@ const useSaveResponsePackage = () => {
annotationList: filteredAnnotations,
widgets: true,
});
+
/** apply redaction and save to s3 - xfdfString is needed to display
* the freetext(section name) on downloaded file.*/
doc
@@ -331,9 +355,9 @@ const useSaveResponsePackage = () => {
(Object.keys(res.attributes).length > 0 && 'personalattributes' in res.attributes && Object.keys(res.attributes?.personalattributes).length > 0) ? res.attributes.personalattributes: {},
res.documentid
);
- // setTimeout(() => {
- // window.location.reload(true);
- // }, 3000);
+ setTimeout(() => {
+ window.location.reload(true);
+ }, 3000);
},
(_err) => {
console.log(_err);
@@ -360,8 +384,8 @@ const useSaveResponsePackage = () => {
};
const checkSavingFinalPackage = (redlineReadyAndValid, instance) => {
const validFinalPackageStatus = requestStatus === RequestStates["Response"];
- setEnableSavingFinal(true)
- //setEnableSavingFinal(redlineReadyAndValid && validFinalPackageStatus);
+ //setEnableSavingFinal(true)
+ setEnableSavingFinal(redlineReadyAndValid && validFinalPackageStatus);
if (instance) {
const document = instance.UI.iframeWindow.document;
document.getElementById("final_package").disabled =
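useSaveResponsePackage now scrubs the final package before export: the document title and author are normalized, bookmarks are stripped, and the diagonal watermark is cleared. Condensed into one sketch using the same WebViewer/PDFNet calls that appear in the patch (the helper itself is illustrative):

    // Sketch: the pre-export clean-up added in this file, condensed into one
    // helper. getPDFDoc/getDocInfo/getFirstBookmark/setWatermark are the calls
    // used in the patch; doc is the WebViewer document being exported.
    const sanitizeResponsePackage = async (doc: any, requestnumber: string) => {
      const pdfDoc = await doc.getPDFDoc();

      // normalize document metadata so source-file details are not leaked
      const docInfo = await pdfDoc.getDocInfo();
      docInfo.setTitle(requestnumber + ".pdf");
      docInfo.setAuthor("");

      // drop bookmarks carried over from the source documents
      let bookmark = await pdfDoc.getFirstBookmark();
      while (bookmark && (await bookmark.isValid())) {
        bookmark.delete();
        bookmark = await pdfDoc.getFirstBookmark();
      }

      // clear the diagonal watermark before the package is rendered
      doc.setWatermark({ diagonal: { text: "" } });
    };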
diff --git a/web/src/components/FOI/Home/CustomTreeView.tsx b/web/src/components/FOI/Home/CustomTreeView.tsx
index 4e9576bc6..b0b7f3dfd 100644
--- a/web/src/components/FOI/Home/CustomTreeView.tsx
+++ b/web/src/components/FOI/Home/CustomTreeView.tsx
@@ -63,7 +63,6 @@ const CustomTreeView = React.memo(React.forwardRef(({
useImperativeHandle(ref, () => ({
async scrollToPage(event: any, newExpandedItems: string[], pageId: string) {
-
setExpandedItems([...new Set(expandedItems.concat(newExpandedItems))]);
await new Promise(resolve => setTimeout(resolve, 400)); // wait for expand animation to finish
apiRef.current?.focusItem(event, pageId)
@@ -73,10 +72,9 @@ const CustomTreeView = React.memo(React.forwardRef(({
scrollLeftPanelPosition(event: any)
{
let _lastselected = localStorage.getItem("lastselected")
-
if(_lastselected)
{
- let _docid = JSON.parse(_lastselected)?.docid
+ let _docid = JSON.parse(_lastselected)?.docid
let docidstring = ''
if(_lastselected.indexOf('division')>-1)
{
@@ -126,7 +124,6 @@ const CustomTreeView = React.memo(React.forwardRef(({
let selectedNodes = [];
for (let nodeId of nodeIds) {
nodeId = nodeId.replace(/undefined/g, "null");
- console.log("nodeId:",nodeId)
let node = JSON.parse(nodeId);
selectedNodes.push(node);
if (node.page) {
@@ -180,6 +177,7 @@ const CustomTreeView = React.memo(React.forwardRef(({
// props.itemId = props?.itemId?.replaceAll("undefined", "\"\"");
// let itemid = JSON.parse(props?.itemId);
+ //console.log("CustomTreeItem-",props)
const derivedItemId = props.itemId?.replaceAll("undefined", "\"\"") ?? "";
// Parse the derived itemId
let itemid:any;
@@ -211,7 +209,6 @@ const CustomTreeView = React.memo(React.forwardRef(({
const openContextMenu = (e: any, props: any) => {
if (props.children && requestInfo.bcgovcode !== "MCF" && requestInfo.requesttype !== "personal") return
- // console.log("contextmenu")
e.preventDefault();
let nodeId: string = e.target.parentElement.parentElement.id;
if (nodeId === "") {
@@ -227,7 +224,6 @@ const CustomTreeView = React.memo(React.forwardRef(({
return f.documentid === nodeIdJson.docid;
});
setCurrentEditRecord(currentFiles[0]);
- // console.log("selected file: ", currentFiles[0]);
setActiveNode(nodeIdJson);
} else { //mcf personal level 1 tree item
diff --git a/web/src/components/FOI/Home/DocumentSelector.scss b/web/src/components/FOI/Home/DocumentSelector.scss
index 6344ecde6..331b249aa 100644
--- a/web/src/components/FOI/Home/DocumentSelector.scss
+++ b/web/src/components/FOI/Home/DocumentSelector.scss
@@ -12,6 +12,7 @@
.heading {
margin-left: 16px;
margin-bottom:12px;
+ padding-right: 4px;
}
}
@@ -329,5 +330,14 @@
color: #38598A;
}
.editPersonalTags {
+ padding: 4px 0px;
cursor: pointer;
}
+.editPersonalTags:hover {
+ background-color: #f2f2f2;
+}
+.editPersonalTagsDisabled {
+ padding: 4px 0px;
+ cursor: not-allowed;
+ color: #cfcfcf;
+}
\ No newline at end of file
diff --git a/web/src/components/FOI/Home/DocumentSelector.tsx b/web/src/components/FOI/Home/DocumentSelector.tsx
index 731411b44..c6c0d7b43 100644
--- a/web/src/components/FOI/Home/DocumentSelector.tsx
+++ b/web/src/components/FOI/Home/DocumentSelector.tsx
@@ -80,7 +80,24 @@ const DocumentSelector = React.memo(
(f: any) => f.documentid === lookup.docid
);
let pageId, newExpandedItems;
- if (organizeBy === "lastmodified") {
+ if(requestInfo.bcgovcode === "MCF" && requestInfo.requesttype === "personal"){
+ let label = file.attributes.personalattributes.person + ' - ' + file.attributes.personalattributes.filetype;
+ if (file.attributes.personalattributes.trackingid) {
+ label += ' - ' + file.attributes.personalattributes.trackingid;
+ }
+ if (file.attributes.personalattributes.volume) {
+ label += ' - ' + file.attributes.personalattributes.volume;
+ }
+ pageId = `{"filevolume": "${label}", "docid": ${file.documentid}, "page": ${
+ lookup.page
+ }, "flagid": [${getPageFlagIds(file.pageFlag, lookup.page)}], "title": "${getFlagName(file, lookup.page)}"}`;
+
+ newExpandedItems = [
+ '{"filevolume": "' + label + '"}',
+ '{"filevolume": "' + label + '", "docid": ' + lookup.docid + '}'
+ ];
+ }
+ else if (organizeBy === "lastmodified") {
pageId = `{"docid": ${file.documentid}, "page": ${
lookup.page
}, "flagid": [${getPageFlagIds(
@@ -106,7 +123,6 @@ const DocumentSelector = React.memo(
"}",
];
}
-
treeRef?.current?.scrollToPage(event, newExpandedItems, pageId);
},
@@ -311,12 +327,24 @@ const DocumentSelector = React.memo(
const onFilterChange = (filterValue: string) => {
- setFilesForDisplay(
- files.filter((file: any) => file.filename.includes(filterValue))
- );
- setFilteredFiles(
- files.filter((file: any) => file.filename.includes(filterValue))
- );
+ if(requestInfo.bcgovcode === "MCF" && requestInfo.requesttype === "personal"){
+ let filtered = files.filter((file: any) => {
+ const personalAttributes = file.attributes.personalattributes;
+ return Object.values(personalAttributes).some((value: any) =>
+ value?.toString().toLowerCase().includes(filterValue.toLowerCase())
+ );
+ })
+ setFilesForDisplay(filtered);
+ setFilteredFiles(filtered);
+ }
+ else{
+ setFilesForDisplay(
+ files.filter((file: any) => file.filename.includes(filterValue))
+ );
+ setFilteredFiles(
+ files.filter((file: any) => file.filename.includes(filterValue))
+ );
+ }
};
const selectTreeItem = (docid: any, page: number) => {
@@ -586,7 +614,20 @@ const DocumentSelector = React.memo(
});
}
});
- if (organizeBy === "lastmodified") {
+ if (requestInfo.bcgovcode === "MCF" && requestInfo.requesttype === "personal") {
+ return filteredpages.map((p: any) => {
+ return {
+ id: `{"filevolume": "${division}", "docid": ${
+ file.documentid
+ }, "page": ${p}, "flagid": [${getPageFlagIds(
+ file.pageFlag,
+ p
+ )}], "title": "${getFlagName(file, p)}"}`,
+ label: getPageLabel(file, p),
+ };
+ });
+ }
+ else if (organizeBy === "lastmodified") {
return filteredpages.map((p: any) => {
return {
id: `{"docid": ${
@@ -623,33 +664,46 @@ const DocumentSelector = React.memo(
// }
// }
} else {
- if (organizeBy === "lastmodified") {
- return file.pages.map((p: any) => {
- return {
- id: `{"docid": ${
- file.documentid
- }, "page": ${p}, "flagid": [${getPageFlagIds(
- file.pageFlag,
- p
- )}], "title": "${getFlagName(file, p)}"}`,
- label: getPageLabel(file, p),
- };
- });
- } else {
- return file.pages.map((p: any) => {
- return {
- id: `{"division": ${division?.divisionid}, "docid": ${
- file.documentid
- }, "page": ${p}, "flagid": [${getPageFlagIds(
- file.pageFlag,
- p
- )}], "title": "${getFlagName(file, p)}"}`,
- label: getPageLabel(file, p),
- };
- });
- }
+ if (requestInfo.bcgovcode === "MCF" && requestInfo.requesttype === "personal") {
+ return file.pages.map((p: any) => {
+ return {
+ id: `{"filevolume": "${division}", "docid": ${
+ file.documentid
+ }, "page": ${p}, "flagid": [${getPageFlagIds(
+ file.pageFlag,
+ p
+ )}], "title": "${getFlagName(file, p)}"}`,
+ label: getPageLabel(file, p),
+ };
+ });
+ }
+ else if (organizeBy === "lastmodified") {
+ return file.pages.map((p: any) => {
+ return {
+ id: `{"docid": ${
+ file.documentid
+ }, "page": ${p}, "flagid": [${getPageFlagIds(
+ file.pageFlag,
+ p
+ )}], "title": "${getFlagName(file, p)}"}`,
+ label: getPageLabel(file, p),
+ };
+ });
+ } else {
+ return file.pages.map((p: any) => {
+ return {
+ id: `{"division": ${division?.divisionid}, "docid": ${
+ file.documentid
+ }, "page": ${p}, "flagid": [${getPageFlagIds(
+ file.pageFlag,
+ p
+ )}], "title": "${getFlagName(file, p)}"}`,
+ label: getPageLabel(file, p),
+ };
+ });
+ }
}
- }
+ }
const getTreeItems = () => {
if (pageFlags) {
@@ -674,9 +728,9 @@ const DocumentSelector = React.memo(
index = tree.length - 1
}
tree[index].children.push({
- id: `{"docid": ${file.documentid}}`,
+ id: `{"filevolume": "${label}", "docid": ${file.documentid}}`,
label: (file.attributes.personalattributes.personaltag || 'TBD') + ' (' + file.pages.length + ')',
- children: getFilePages(file)
+ children: getFilePages(file, label)
})
}
return tree;
diff --git a/web/src/components/FOI/Home/FOIPPASectionsModal.js b/web/src/components/FOI/Home/FOIPPASectionsModal.js
index e78d83df7..48dab0601 100644
--- a/web/src/components/FOI/Home/FOIPPASectionsModal.js
+++ b/web/src/components/FOI/Home/FOIPPASectionsModal.js
@@ -32,7 +32,9 @@ export const FOIPPASectionsModal= ({
saveRedaction,
defaultSections,
saveDefaultSections,
- clearDefaultSections
+ clearDefaultSections,
+ pageSelectionsContainNRDup,
+ setMessageModalOpen
}) => {
const [modalSortNumbered, setModalSortNumbered] = useState(false);
@@ -101,6 +103,15 @@ export const FOIPPASectionsModal= ({
return b.count - a.count;
}
};
+
+ const handleSelectCodes = () => {
+ if (editRedacts) {
+ saveRedactions();
+ } else {
+ saveRedaction();
+ }
+ setMessageModalOpen(pageSelectionsContainNRDup);
+ }
return(
@@ -187,7 +198,7 @@ export const FOIPPASectionsModal= ({
Select Code(s)
diff --git a/web/src/components/FOI/Home/FeeOverrideModal.jsx b/web/src/components/FOI/Home/FeeOverrideModal.jsx
new file mode 100644
index 000000000..379a78a3d
--- /dev/null
+++ b/web/src/components/FOI/Home/FeeOverrideModal.jsx
@@ -0,0 +1,90 @@
+import ReactModal from "react-modal-resizable-draggable";
+import DialogActions from "@mui/material/DialogActions";
+import DialogContent from "@mui/material/DialogContent";
+import DialogContentText from "@mui/material/DialogContentText";
+import DialogTitle from "@mui/material/DialogTitle";
+import CloseIcon from "@mui/icons-material/Close";
+import IconButton from "@mui/material/IconButton";
+
+const FeeOverrideModal = ({
+ modalData,
+ cancelRedaction,
+ outstandingBalanceModal,
+ cancelSaveRedlineDoc,
+ isOverride,
+ feeOverrideReason,
+ handleOverrideReasonChange,
+ saveDoc,
+ overrideOutstandingBalance,
+}) => {
+ return (
+
+
+ {modalData?.modalTitle}
+
+ Close
+
+
+
+
+
+
+ {modalData?.modalMessage}
+ {isOverride && (
+ <>
+
+
+ Reason for the override :
+
+ >
+ )}
+
+
+
+
+ {!isOverride && (
+
+ {modalData?.modalButtonLabel}
+
+ )}
+ {isOverride && (
+
+ Continue
+
+ )}
+
+ Cancel
+
+
+
+ );
+};
+
+export default FeeOverrideModal;
diff --git a/web/src/components/FOI/Home/Home.js b/web/src/components/FOI/Home/Home.js
index c018633fb..233fb64dc 100644
--- a/web/src/components/FOI/Home/Home.js
+++ b/web/src/components/FOI/Home/Home.js
@@ -45,6 +45,8 @@ function Home() {
const [warningModalOpen, setWarningModalOpen] = useState(false);
const [divisions, setDivisions] = useState([]);
const [pageFlags, setPageFlags]= useState([]);
+ const [isBalanceFeeOverrode, setIsBalanceFeeOverrode] = useState(false);
+ const [outstandingBalance, setOutstandingBalance]= useState(0);
const redliningRef = useRef();
const selectorRef = useRef();
@@ -66,17 +68,19 @@ function Home() {
fetchDocuments(
parseInt(foiministryrequestid),
- async (data, documentDivisions, _requestInfo) => {
+ async (documents, documentDivisions, _requestInfo) => {
setDivisions(documentDivisions);
+ setOutstandingBalance(_requestInfo.outstandingbalance)
+ setIsBalanceFeeOverrode(_requestInfo.balancefeeoverrodforrequest)
const getFileExt = (filepath) => {
const parts = filepath.split(".")
const fileExt = parts.pop()
return fileExt
}
// New code added to get the incompatable files for download redline
- // data has all the files including incompatable ones
+ // documents has all the files, including incompatible ones
// _files has all files except incompatable ones
- const _incompatableFiles = data.filter(
+ const _incompatableFiles = documents.filter(
(d) => {
const isPdfFile = getFileExt(d.filepath) === "pdf"
if (isPdfFile) {
@@ -87,7 +91,7 @@ function Home() {
}
);
setIncompatibleFiles(_incompatableFiles);
- const _files = data.filter((d) => {
+ const _files = documents.filter((d) => {
const isPdfFile = getFileExt(d.filepath) === "pdf"
const isCompatible = !d.attributes.incompatible || isPdfFile
return isCompatible
@@ -105,11 +109,10 @@ function Home() {
});
let doclist = [];
- let requestInfo = _requestInfo;
+ let requestInfo = _requestInfo.requestinfo;
getFOIS3DocumentPreSignedUrls(
documentObjs,
(newDocumentObjs) => {
- console.log(requestInfo)
sortDocList(newDocumentObjs, null, doclist, requestInfo);
//prepareMapperObj will add sortorder, stitchIndex and totalPageCount to doclist
//and prepare the PageMappedDocs object
@@ -300,9 +303,10 @@ function Home() {
incompatibleFiles={incompatibleFiles}
setWarningModalOpen={setWarningModalOpen}
scrollLeftPanel={scrollLeftPanel}
+ isBalanceFeeOverrode={isBalanceFeeOverrode}
+ outstandingBalance={outstandingBalance}
pageFlags={pageFlags}
syncPageFlagsOnAction={syncPageFlagsOnAction}
-
/>
)
// : Loading
diff --git a/web/src/components/FOI/Home/MCFPersonal.js b/web/src/components/FOI/Home/MCFPersonal.js
index 258ec1019..173ccb264 100644
--- a/web/src/components/FOI/Home/MCFPersonal.js
+++ b/web/src/components/FOI/Home/MCFPersonal.js
@@ -26,7 +26,7 @@ const MCFPersonal = ({
setEditTagModalOpen,
setOpenContextPopup,
setNewDivision,
- // tagValue,
+ comparePersonalAttributes,
curPersonalAttributes,
setNewPersonalAttributes,
updatePersonalAttributes,
@@ -69,16 +69,29 @@ const MCFPersonal = ({
const [fileTypeSearchValue, setFileTypeSearchValue] = useState("");
const [additionalFileTypes, setAdditionalFileTypes] = useState([]);
const [showAdditionalFileTypes, setShowAdditionalFileTypes] = useState(false);
+ const [disableSave, setDisableSave] = useState(false);
useEffect(() => {
setPersonalAttributes(curPersonalAttributes);
},[curPersonalAttributes])
+ useEffect(() => {
+ setDisableSave(
+ personalAttributes?.person === undefined
+ || personalAttributes?.person === ""
+ || personalAttributes?.filetype === undefined
+ || personalAttributes?.filetype === ""
+ || personalAttributes?.trackingid === undefined
+ || personalAttributes?.trackingid === ""
+ || comparePersonalAttributes(personalAttributes, curPersonalAttributes)
+ );
+ },[personalAttributes])
+
useEffect(() => {
if(MCFSections?.sections) {
if(MCFSections.sections.length > MCFPopularSections-1) {
setTagList(MCFSections.sections.slice(0, MCFPopularSections-1));
- setOtherTagList(MCFSections.sections.slice(MCFPopularSections));
+ setOtherTagList(MCFSections.sections.slice(MCFPopularSections-1));
} else {
setTagList(MCFSections.sections);
setOtherTagList([]);
@@ -97,7 +110,7 @@ const MCFPersonal = ({
},[MCFPeople])
useEffect(() => {
- if(MCFVolumes?.volumes) {
+ if(!!MCFFiletypes?.filetypes && MCFVolumes?.volumes) {
if(MCFFiletypes.filetypes.length > 5) {
setVolumes(MCFVolumes.volumes.slice(0, 5));
} else {
@@ -109,8 +122,8 @@ const MCFPersonal = ({
useEffect(() => {
if(MCFFiletypes?.filetypes) {
if(MCFFiletypes.filetypes.length > 6) {
- setFileTypes(MCFFiletypes.filetypes.slice(0, 6));
- setOtherFileTypes(MCFFiletypes.filetypes.slice(6, MCFFiletypes.filetypes.length))
+ setFileTypes(MCFFiletypes.filetypes.slice(0, 8));
+ setOtherFileTypes(MCFFiletypes.filetypes.slice(8, MCFFiletypes.filetypes.length))
} else {
setFileTypes(MCFFiletypes.filetypes);
setOtherFileTypes([])
@@ -152,6 +165,16 @@ const MCFPersonal = ({
}
},[showAllPeople, showAllVolumes])
+ React.useEffect(() => {
+ if(MCFPeople.people.length > 0 && personalAttributes.person !== "") {
+ setShowAllPeople( MCFPeople.people.filter(p => p.name==personalAttributes.person)[0]?.sortorder >= 5 );
+ }
+
+ if(MCFVolumes.volumes.length > 0 && personalAttributes.volume !== "") {
+ setShowAllVolumes( MCFVolumes.volumes.filter(v => v.name==personalAttributes.volume)[0]?.sortorder >= 5 );
+ }
+ },[personalAttributes])
+
React.useEffect(() => {
setAdditionalFileTypes(searchFileTypes(otherFileTypes, fileTypeSearchValue, personalAttributes?.filetype));
},[fileTypeSearchValue, otherFileTypes, personalAttributes])
@@ -166,7 +189,7 @@ const MCFPersonal = ({
_sectionArray.map((section) => {
if(_keyword && section.name.toLowerCase().includes(_keyword.toLowerCase())) {
newSectionArray.push(section);
- } else if(section.divisionid === _selectedSectionValue) {
+ } else if(section.name === _selectedSectionValue) {
newSectionArray.unshift(section);
}
});
@@ -221,6 +244,8 @@ const MCFPersonal = ({
};
const handleClose = () => {
+ setSearchValue("");
+ setFileTypeSearchValue("");
setCurrentEditRecord();
setCurPersonalAttributes({
person: "",
@@ -234,6 +259,11 @@ const MCFPersonal = ({
setOpenContextPopup(false);
};
+ const reset = () => {
+ setSearchValue("");
+ setFileTypeSearchValue("");
+ };
+
const handleFileTypeSearchKeywordChange = (keyword) => {
setFileTypeSearchValue(keyword);
}
@@ -583,13 +613,15 @@ const MCFPersonal = ({
updatePersonalAttributes()}
+ onClick={() => {updatePersonalAttributes();reset();}}
+ disabled={disableSave}
>
Update for Individual
updatePersonalAttributes(true)}
+ onClick={() => {updatePersonalAttributes(true);reset();}}
+ disabled={disableSave}
>
Update for All
diff --git a/web/src/components/FOI/Home/Redlining.js b/web/src/components/FOI/Home/Redlining.js
index 3fd5fc417..4b02a814c 100644
--- a/web/src/components/FOI/Home/Redlining.js
+++ b/web/src/components/FOI/Home/Redlining.js
@@ -53,10 +53,12 @@ import {
createFinalPackageSelection,
createOIPCForReviewSelection,
createRedlineForSignOffSelection,
- createResponsePDFMenu,
+ createResponsePDFMenu,
+ createConsultPackageSelection,
handleFinalPackageClick,
handleRedlineForOipcClick,
handleRedlineForSignOffClick,
+ handleConsultPackageClick,
renderCustomButton,
isValidRedlineDownload,
isReadyForSignOff } from "./CreateResponsePDF/CreateResponsePDF";
@@ -66,6 +68,7 @@ import {ConfirmationModal} from "./ConfirmationModal";
import { FOIPPASectionsModal } from "./FOIPPASectionsModal";
import { NRWarningModal } from "./NRWarningModal";
import Switch from "@mui/material/Switch";
+import FeeOverrideModal from "./FeeOverrideModal";
const Redlining = React.forwardRef(
(
@@ -81,8 +84,10 @@ const Redlining = React.forwardRef(
incompatibleFiles,
setWarningModalOpen,
scrollLeftPanel,
+ isBalanceFeeOverrode,
+ outstandingBalance,
pageFlags,
- syncPageFlagsOnAction
+ syncPageFlagsOnAction,
},
ref
) => {
@@ -101,6 +106,7 @@ const Redlining = React.forwardRef(
const currentLayer = useSelector((state) => state.documents?.currentLayer);
const deletedDocPages = useAppSelector((state) => state.documents?.deletedDocPages);
const validoipcreviewlayer = useAppSelector((state) => state.documents?.requestinfo?.validoipcreviewlayer);
+ const requestType = useAppSelector((state) => state.documents?.requestinfo?.requesttype);
const viewer = useRef(null);
const [documentList, setDocumentList] = useState([]);
@@ -137,9 +143,15 @@ const Redlining = React.forwardRef(
const [modalData, setModalData] = useState(null);
const [enableRedactionPanel, setEnableRedactionPanel] = useState(false);
const [clickRedactionPanel, setClickRedactionPanel] = useState(false);
+
const [pagesRemoved, setPagesRemoved] = useState([]);
const [redlineModalOpen, setRedlineModalOpen] = useState(false);
const [isDisableNRDuplicate, setIsDisableNRDuplicate] = useState(false);
+ const [pageSelectionsContainNRDup, setPageSelectionsContainNRDup] = useState(false);
+ const [outstandingBalanceModal, setOutstandingBalanceModal] = useState(false);
+ const [isOverride, setIsOverride]= useState(false);
+ const [feeOverrideReason, setFeeOverrideReason]= useState("");
+
//xml parser
const parser = new XMLParser();
/**Response Package && Redline download and saving logic (react custom hooks)*/
@@ -151,10 +163,19 @@ const Redlining = React.forwardRef(
saveRedlineDocument,
enableSavingOipcRedline,
enableSavingRedline,
+ enableSavingConsults,
checkSavingRedline,
checkSavingOIPCRedline,
+ checkSavingConsults,
setRedlineCategory,
setFilteredComments,
+ setSelectedPublicBodyIDs,
+ setConsultApplyRedactions,
+ selectedPublicBodyIDs,
+ documentPublicBodies,
+ consultApplyRedactions,
+ setConsultApplyRedlines,
+ consultApplyRedlines,
} = useSaveRedlineForSignoff(docInstance, docViewer);
const {
saveResponsePackage,
@@ -231,6 +252,7 @@ const Redlining = React.forwardRef(
const redlineForSignOffBtn = createRedlineForSignOffSelection(document, enableSavingRedline);
const redlineForOipcBtn = createOIPCForReviewSelection(document, enableSavingOipcRedline);
const finalPackageBtn = createFinalPackageSelection(document, enableSavingFinal);
+ const consultPackageButton = createConsultPackageSelection(document, enableSavingConsults);
redlineForOipcBtn.onclick = () => {
handleRedlineForOipcClick(updateModalData, setRedlineModalOpen);
};
@@ -238,11 +260,16 @@ const Redlining = React.forwardRef(
handleRedlineForSignOffClick(updateModalData, setRedlineModalOpen);
};
finalPackageBtn.onclick = () => {
- handleFinalPackageClick(updateModalData, setRedlineModalOpen);
+ handleFinalPackageClick(updateModalData, setRedlineModalOpen, outstandingBalance,
+ isBalanceFeeOverrode, setOutstandingBalanceModal, setIsOverride);
+ };
+ consultPackageButton.onclick = () => {
+ handleConsultPackageClick(updateModalData, setRedlineModalOpen, setIncludeDuplicatePages, setIncludeNRPages)
};
menu.appendChild(redlineForOipcBtn);
menu.appendChild(redlineForSignOffBtn);
menu.appendChild(finalPackageBtn);
+ menu.appendChild(consultPackageButton);
parent.appendChild(menu);
//Create render function to render custom Create Reseponse PDF button
@@ -444,12 +471,12 @@ const Redlining = React.forwardRef(
})
var x = 0, y = 0
- documentViewer.addEventListener("mouseLeftDown", async (event) => {
+ documentViewer.addEventListener("mouseRightDown", async (event) => {
x = event.pageX;
y = event.pageY;
});
- documentViewer.addEventListener("mouseLeftUp", async (event) => {
+ documentViewer.addEventListener("mouseRightUp", async (event) => {
if (window.Math.abs(event.pageX - x) < 2 && window.Math.abs(event.pageY - y) < 2) {
scrollLeftPanel(event, documentViewer.getCurrentPage());
}
@@ -591,6 +618,7 @@ const Redlining = React.forwardRef(
}, []);
const updateModalData = (newModalData) => {
+ setRedlineCategory(newModalData.modalFor);
setModalData(newModalData);
};
@@ -924,7 +952,7 @@ const Redlining = React.forwardRef(
docversion: displayedDoc.docversion,
isFullPage: isFullPage
}
- const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "delete");
+ const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "delete", pageFlags);
if (pageFlagsUpdated) {
pageFlagObj.push(pageFlagsUpdated);
}
@@ -991,7 +1019,7 @@ const Redlining = React.forwardRef(
let individualPageNo;
await removeRedactAnnotationDocContent(annotations);
-
+
if (annotations[0].Subject === "Redact") {
let pageSelectionList = [...pageSelections];
annots[0].children?.forEach((annotatn, i) => {
@@ -1088,7 +1116,7 @@ const Redlining = React.forwardRef(
docid: displayedDoc.docid,
docversion: displayedDoc.docversion,
}
- const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "add");
+ const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "add", pageFlags);
if (pageFlagsUpdated) {
pageFlagObj.push(pageFlagsUpdated);
}
@@ -1383,6 +1411,7 @@ const Redlining = React.forwardRef(
const validRedlineDownload = isValidRedlineDownload(pageFlags);
const redlineReadyAndValid = readyForSignOff && validRedlineDownload;
const oipcRedlineReadyAndValid = (validoipcreviewlayer === true && currentLayer.name.toLowerCase() === "oipc") && readyForSignOff;
+ checkSavingConsults(documentList, _instance);
checkSavingRedline(redlineReadyAndValid, _instance);
checkSavingOIPCRedline(oipcRedlineReadyAndValid, _instance, readyForSignOff);
checkSavingFinalPackage(redlineReadyAndValid, _instance);
@@ -1396,7 +1425,7 @@ const Redlining = React.forwardRef(
if (docInstance && documentList.length > 0) {
const document = docInstance?.UI.iframeWindow.document;
document.getElementById("create_response_pdf").addEventListener("click", handleCreateResponsePDFClick);
- docViewer.setWatermark({
+ docViewer?.setWatermark({
// Draw custom watermark in middle of the document
custom: (ctx, pageNumber, pageWidth, pageHeight) => {
// ctx is an instance of CanvasRenderingContext2D
@@ -1404,8 +1433,9 @@ const Redlining = React.forwardRef(
// Hence being able to leverage those properties
let originalPage = pageMappedDocs['stitchedPageLookup'][pageNumber]
let doc = pageFlags.find(d => d.documentid === originalPage.docid);
- let pageFlag = doc.pageflag.find(f => f.page === originalPage.page);
- if (pageFlag.flagid === pageFlagTypes["Duplicate"]) {
+ let pageFlagsOnPage = doc?.pageflag?.filter(f => f.page === originalPage.page);
+ let NrOrDupeFlag = pageFlagsOnPage?.find(pageFlagItem => pageFlagItem.flagid === pageFlagTypes["Duplicate"] || pageFlagItem.flagid === pageFlagTypes["Not Responsive"]);
+ if (NrOrDupeFlag?.flagid === pageFlagTypes["Duplicate"]) {
ctx.fillStyle = "#ff0000";
ctx.font = "20pt Arial";
ctx.globalAlpha = 0.4;
@@ -1417,7 +1447,7 @@ const Redlining = React.forwardRef(
ctx.restore();
}
- if (pageFlag.flagid === pageFlagTypes["Not Responsive"]) {
+ if (NrOrDupeFlag?.flagid === pageFlagTypes["Not Responsive"]) {
ctx.fillStyle = "#ff0000";
ctx.font = "20pt Arial";
ctx.globalAlpha = 0.4;
@@ -1430,6 +1460,8 @@ const Redlining = React.forwardRef(
}
},
});
+ docViewer?.refreshAll();
+ docViewer?.updateView();
}
//Cleanup Function: removes previous event listeiner to ensure handleCreateResponsePDFClick event is not called multiple times on click
return () => {
@@ -1523,6 +1555,9 @@ const Redlining = React.forwardRef(
let username = docViewer?.getAnnotationManager()?.getCurrentUser();
for (const entry in annotData) {
let xml = parser.parseFromString(annotData[entry]);
+ // import redactions first, free text later, so translucent redaction won't cover free text
+ let xmlAnnotsChildren_redaction = [];
+ let xmlAnnotsChildren_others = [];
for (let annot of xml.getElementsByTagName("annots")[0].children) {
let txt = domParser.parseFromString(
annot.getElementsByTagName("trn-custom-data")[0].attributes.bytes,
@@ -1536,6 +1571,12 @@ const Redlining = React.forwardRef(
(p) => p.pageNo - 1 === Number(originalPageNo)
)?.stitchedPageNo - 1
)?.toString();
+ if(annot.attributes.subject === "Redact") {
+ xmlAnnotsChildren_redaction.push(annot);
+ } else {
+ xmlAnnotsChildren_others.push(annot);
+ }
+ xml.getElementsByTagName("annots")[0].children = [...xmlAnnotsChildren_redaction, ...xmlAnnotsChildren_others];
}
xml = parser.toString(xml);
const _annotations = await annotManager.importAnnotations(xml);
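A minimal sketch (illustrative only, not part of the patch) of the reordering idea in the hunk above: the parsed annotation nodes are partitioned so Redact entries come first and everything else follows, matching the comment about importing redactions before free text so the translucent redactions do not cover the labels.

// Illustrative sketch only: stable partition of parsed annotation nodes so that
// Redact annotations are imported before other annotation types.
const annots = [
  { attributes: { subject: "FreeText" } },
  { attributes: { subject: "Redact" } },
  { attributes: { subject: "FreeText" } },
];
const redactions = annots.filter((a) => a.attributes.subject === "Redact");
const others = annots.filter((a) => a.attributes.subject !== "Redact");
console.log([...redactions, ...others].map((a) => a.attributes.subject));
// -> [ 'Redact', 'FreeText', 'FreeText' ]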
@@ -1701,7 +1742,7 @@ const Redlining = React.forwardRef(
docid: displayedDoc.docid,
docversion: displayedDoc.docversion,
}
- const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "edit");
+ const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "edit", pageFlags);
if (pageFlagsUpdated) {
pageFlagObj.push(pageFlagsUpdated);
}
@@ -1882,7 +1923,7 @@ const Redlining = React.forwardRef(
pageSelectionList
);
const pageFlagObj = [];
- const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "edit");
+ const pageFlagsUpdated = constructPageFlags(annotationsInfo, exisitngAnnotations, pageMappedDocs, pageFlagTypes, RedactionTypes, "edit", pageFlags);
if (pageFlagsUpdated) {
pageFlagObj.push(pageFlagsUpdated);
}
@@ -2180,8 +2221,14 @@ const Redlining = React.forwardRef(
}, [deleteQueue, newRedaction]);
const cancelRedaction = () => {
- setModalOpen(false);
- setMessageModalOpen(false);
+ if(outstandingBalance > 0 && !isBalanceFeeOverrode){
+ setIsOverride(false)
+ setOutstandingBalanceModal(false)
+ }
+ else{
+ setModalOpen(false);
+ setMessageModalOpen(false);
+ }
setSelectedPageFlagId(null);
setSelectedSections([]);
setSaveDisabled(true);
@@ -2228,13 +2275,48 @@ const Redlining = React.forwardRef(
,
],
});
- setMessageModalOpen(true)
+ setMessageModalOpen(true);
+ }
+
+ const setMessageModalForNrDuplicatePriority = () => {
+ updateModalData({
+ modalTitle: "Selected page(s) currently have an NR or Duplicate flag applied",
+ modalMessage: [
+
+ Please note, your redaction(s) have been applied to your selected page(s). However, to flag your selected page(s) as Withheld in Full, you must first change your selected page(s)' flags to In Progress.
+ After your selected page(s) are flagged as In Progress, you may proceed to mark them as Withheld in Full.
+
+ ],
+ });
}
useEffect(() => {
if (!newRedaction) return;
const astrType = decodeAstr(newRedaction.astr)['trn-redaction-type'] || '';
const hasFullPageRedaction = astrType === "fullPage";
+ // Alert the user when a withheld-in-full pageflag/redaction is applied to a page that already carries a Duplicate or NR pageflag.
+ let hasNROrDuplicateFlag = false;
+ if (selectedPageFlagId === pageFlagTypes["Withheld in Full"] || hasFullPageRedaction) {
+ const pageFlagsMap = new Map();
+ for (let docPageFlags of pageFlags) {
+ pageFlagsMap.set(docPageFlags.documentid, docPageFlags.pageflag);
+ }
+ for (let pageObj of pageSelections) {
+ if (hasNROrDuplicateFlag) {
+ break;
+ }
+ const pageFlagList = pageFlagsMap.get(pageObj.docid);
+ if (pageFlagList) {
+ for (let flagObj of pageFlagList) {
+ if (flagObj.page === pageObj.page && (flagObj.flagid === pageFlagTypes["Not Responsive"] || flagObj.flagid === pageFlagTypes["Duplicate"])) {
+ hasNROrDuplicateFlag = true;
+ break;
+ }
+ }
+ }
+ }
+ }
+ setPageSelectionsContainNRDup(hasNROrDuplicateFlag);
if (newRedaction.names?.length > REDACTION_SELECT_LIMIT) {
setWarningModalOpen(true);
@@ -2243,6 +2325,9 @@ const Redlining = React.forwardRef(
saveRedaction();
} else if (defaultSections.length == 0 && !hasFullPageRedaction) {
setModalOpen(true);
+ } else if (hasNROrDuplicateFlag) {
+ setModalOpen(true);
+ setMessageModalForNrDuplicatePriority();
} else if (selectedPageFlagId === pageFlagTypes["Withheld in Full"] && defaultSections.length > 0) {
setMessageModalForNotResponsive();
} else if (hasFullPageRedaction) {
@@ -2280,6 +2365,15 @@ const Redlining = React.forwardRef(
const cancelSaveRedlineDoc = () => {
disableNRDuplicate();
setRedlineModalOpen(false);
+ setSelectedPublicBodyIDs([]);
+ setConsultApplyRedactions(false);
+ setConsultApplyRedlines(false);
+ if(outstandingBalance > 0 && !isBalanceFeeOverrode){
+ setOutstandingBalanceModal(false)
+ setIsOverride(false)
+ }
+ else
+ setRedlineModalOpen(false);
};
const handleIncludeNRPages = (e) => {
@@ -2289,8 +2383,35 @@ const Redlining = React.forwardRef(
const handleIncludeDuplicantePages = (e) => {
setIncludeDuplicatePages(e.target.checked);
};
+
+ const handleApplyRedactions = (e) => {
+ setConsultApplyRedactions(e.target.checked);
+ }
+
+ const handleApplyRedlines = (e) => {
+ setConsultApplyRedlines(e.target.checked);
+ if (consultApplyRedactions) {
+ setConsultApplyRedactions(false);
+ }
+ }
+
+ const handleSelectedPublicBodies = (e) => {
+ let publicBodyId = !isNaN(parseInt(e.target.value)) ? parseInt(e.target.value) : e.target.value;
+ if (selectedPublicBodyIDs.includes(publicBodyId)) {
+ setSelectedPublicBodyIDs((prev) => {
+ return [...prev.filter(id => id !== publicBodyId)]
+ });
+ }
+ else {
+ setSelectedPublicBodyIDs((prev) => {
+ return [...prev, publicBodyId]
+ });
+ }
+ }
const saveDoc = () => {
+ setIsOverride(false)
+ setOutstandingBalanceModal(false)
setRedlineModalOpen(false);
setRedlineSaving(true);
let modalFor= modalData? modalData.modalFor : ""
@@ -2300,6 +2421,7 @@ const Redlining = React.forwardRef(
switch (modalFor) {
case "oipcreview":
case "redline":
+ case "consult":
saveRedlineDocument(
docInstance,
modalFor,
@@ -2316,7 +2438,9 @@ const Redlining = React.forwardRef(
docInstance,
documentList,
pageMappedDocs,
- pageFlags
+ pageFlags,
+ feeOverrideReason,
+ requestType,
);
break;
default:
@@ -2334,7 +2458,7 @@ const Redlining = React.forwardRef(
return trnCustomData
}
- const NRID = sections?.find(s => s.section === "Not Responsive")?.id;
+ const NRID = sections?.find(s => s.section === "NR")?.id;
const blankID = sections?.find(s => s.section === "")?.id;
const sectionIsDisabled = (sectionid) => {
@@ -2373,6 +2497,13 @@ const Redlining = React.forwardRef(
return isDisabled
}
+ const overrideOutstandingBalance = () => {
+ setIsOverride(true)
+ }
+ const handleOverrideReasonChange = (event) => {
+ setFeeOverrideReason(event.target.value);
+ };
+
return (
@@ -2385,6 +2516,8 @@ const Redlining = React.forwardRef(
handleSectionSelected={handleSectionSelected}
editRedacts={editRedacts}
saveRedactions={saveRedactions}
+ pageSelectionsContainNRDup={pageSelectionsContainNRDup}
+ setMessageModalOpen={setMessageModalOpen}
saveDisabled={saveDisabled}
saveRedaction={saveRedaction}
defaultSections={defaultSections}
@@ -2403,6 +2536,13 @@ const Redlining = React.forwardRef(
isDisableNRDuplicate={isDisableNRDuplicate}
saveDoc={saveDoc}
modalData={modalData}
+ documentPublicBodies={documentPublicBodies}
+ handleSelectedPublicBodies={handleSelectedPublicBodies}
+ selectedPublicBodyIDs={selectedPublicBodyIDs}
+ consultApplyRedactions={consultApplyRedactions}
+ handleApplyRedactions={handleApplyRedactions}
+ handleApplyRedlines={handleApplyRedlines}
+ consultApplyRedlines={consultApplyRedlines}
/>
}
{messageModalOpen &&
@@ -2412,6 +2552,17 @@ const Redlining = React.forwardRef(
modalData={modalData}
/>
}
+
);
}
diff --git a/web/src/components/FOI/Home/utils.js b/web/src/components/FOI/Home/utils.js
index dbcf9974e..5f7e7dda0 100644
--- a/web/src/components/FOI/Home/utils.js
+++ b/web/src/components/FOI/Home/utils.js
@@ -73,13 +73,16 @@ export const CFDSorting = (a, b) => {
b = b.file;
}
if (a.attributes.personalattributes.person !== b.attributes.personalattributes.person) {
- return (a.attributes.personalattributes.person > b.attributes.personalattributes.person) ? 1 : -1
+ // return (a.attributes.personalattributes.person > b.attributes.personalattributes.person) ? 1 : -1
+ return a.attributes.personalattributes.person.localeCompare(b.attributes.personalattributes.person, undefined, {numeric: true, sensitivity: 'base'})
} else if (a.attributes.personalattributes.filetype !== b.attributes.personalattributes.filetype) {
return (a.attributes.personalattributes.filetype > b.attributes.personalattributes.filetype) ? 1 : -1
} else if (a.attributes.personalattributes.trackingid !== b.attributes.personalattributes.trackingid) {
- return (a.attributes.personalattributes.trackingid > b.attributes.personalattributes.trackingid) ? 1 : -1
+ // return (a.attributes.personalattributes.trackingid > b.attributes.personalattributes.trackingid) ? 1 : -1
+ return a.attributes.personalattributes.trackingid.localeCompare(b.attributes.personalattributes.trackingid, undefined, {numeric: true, sensitivity: 'base'})
} else if (a.attributes.personalattributes.volume !== b.attributes.personalattributes.volume) {
- return (a.attributes.personalattributes.volume > b.attributes.personalattributes.volume) ? 1 : -1
+ // return (a.attributes.personalattributes.volume > b.attributes.personalattributes.volume) ? 1 : -1
+ return a.attributes.personalattributes.volume ? a.attributes.personalattributes.volume.localeCompare(b.attributes.personalattributes.volume, undefined, {numeric: true, sensitivity: 'base'}) : -1
}
return Date.parse(a.created_at) - Date.parse(b.created_at);
};
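The switch in CFDSorting from plain ">" comparisons to localeCompare with numeric: true gives a natural sort; a quick sketch (not part of the patch) of the difference:

// Illustrative sketch only: numeric-aware comparison vs. plain string comparison.
const volumes = ["Volume 10", "Volume 2"];
console.log([...volumes].sort((a, b) => (a > b ? 1 : -1)));
// -> [ 'Volume 10', 'Volume 2' ]  (lexicographic: "1" < "2")
console.log(
  [...volumes].sort((a, b) =>
    a.localeCompare(b, undefined, { numeric: true, sensitivity: "base" })
  )
);
// -> [ 'Volume 2', 'Volume 10' ]  (natural order)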
@@ -106,7 +109,7 @@ export const sortDocList = (fullDocList, currentDoc, sortedDocList, requestInfo)
if (childDocList.length == 1) {
sortedChildDocList = childDocList;
} else {
- if (requestInfo.bcgovcode === "MCF") {
+ if (requestInfo?.bcgovcode === "MCF" && requestInfo?.requesttype === "personal") {
sortedChildDocList = childDocList.sort(CFDSorting);
} else {
sortedChildDocList = childDocList.sort(docSorting);
@@ -421,7 +424,8 @@ const constructPageFlagsForDelete = (
exisitngAnnotations,
displayedDoc,
pageFlagTypes,
- redactionType
+ redactionType,
+ pageFlags
) => {
let pagesToUpdate = {};
let found = false;
@@ -432,7 +436,16 @@ const constructPageFlagsForDelete = (
(_annotation) =>
_annotation.getCustomData("trn-redaction-type") == "fullPage"
);
- // full page redaction is always have first priority
+ // NR / Duplicate pageflags take precedence first
+ const foundNROrDuplicateFlagObj = findNROrDuplicatePageFlag(pageFlags, displayedDoc, pageFlagTypes);
+ if (foundNROrDuplicateFlagObj) {
+ return {
+ docid: displayedDoc?.docid,
+ page: displayedDoc?.page,
+ flagid: foundNROrDuplicateFlagObj.flagid
+ };
+ }
+ // full page redaction takes the next precedence
if (fullPageRedaction.length > 0) {
const fullPageSectionsStr = fullPageRedaction[0].getCustomData("sections");
const fullPageSectionValue = getSectionValue(fullPageSectionsStr);
@@ -500,14 +513,27 @@ const constructPageFlagsForAddOrEdit = (
annotationsInfo,
exisitngAnnotations,
displayedDoc,
- pageFlagTypes
+ pageFlagTypes,
+ pageFlags
) => {
let pagesToUpdate = {};
+ if (annotationsInfo.section === undefined) {
+ return getValidObject(pagesToUpdate); // non-redaction annotations do not need page flags applied automatically
+ }
const foundBlank = ["", " "].includes(annotationsInfo.section);
const foundNR = annotationsInfo.section == "NR";
// section with a valid number found
const foundValidSection = !["", " ", "NR"].includes(annotationsInfo.section);
- // add/edit - fullPage takes the precedence
+ // add/edit - NR / Duplicate pageflags take precedence first
+ const foundNROrDuplicateFlagObj = findNROrDuplicatePageFlag(pageFlags, displayedDoc, pageFlagTypes);
+ if (foundNROrDuplicateFlagObj) {
+ return {
+ docid: displayedDoc?.docid,
+ page: displayedDoc?.page,
+ flagid: foundNROrDuplicateFlagObj.flagid
+ };
+ }
+ // add/edit - fullPage takes the next precedence
if (annotationsInfo?.redactiontype === "fullPage") {
// addition of full page redaction with blank code return "In Progress" page flag.
if (foundBlank) {
@@ -624,12 +650,14 @@ export const constructPageFlags = (
pageMappedDocs,
pageFlagTypes,
RedactionTypes,
- action = ""
+ action = "",
+ pageFlags = []
) => {
- // 1. always withheld in full takes precedence
- // 2. then, partial disclosure
- // 3. then, NR (full disclosure)
- // 4. lastly, BLANK (in progress)
+ // 1. An existing NR/Dup pageflag takes precedence: annotations added to such a page never change its pageflag
+ // 2. then, withheld in full takes precedence
+ // 3. then, partial disclosure
+ // 4. then, NR (full disclosure)
+ // 5. lastly, BLANK (in progress)
const displayedDoc =
pageMappedDocs.stitchedPageLookup[Number(annotationsInfo.stitchpage) + 1];
// get exisitng FreeText annotations on the page
@@ -643,7 +671,8 @@ export const constructPageFlags = (
annotationsInfo,
_exisitngAnnotations,
displayedDoc,
- pageFlagTypes
+ pageFlagTypes,
+ pageFlags
);
} else if (action === "delete") {
const redactionType = getRedactionType(
@@ -655,14 +684,16 @@ export const constructPageFlags = (
_exisitngAnnotations,
displayedDoc,
pageFlagTypes,
- redactionType
+ redactionType,
+ pageFlags
);
} else {
return constructPageFlagsForAddOrEdit(
annotationsInfo,
_exisitngAnnotations,
displayedDoc,
- pageFlagTypes
+ pageFlagTypes,
+ pageFlags,
);
}
};
@@ -752,3 +783,15 @@ export const skipNRDocument = (documentPageFlags, pagecount, pageFlagTypes) => {
}
return skipdocument;
}
+
+export const findNROrDuplicatePageFlag = (pageFlags, docObj, pageFlagTypes) => {
+ const docPageFlags = pageFlags.find(pageFlagObj => pageFlagObj.documentid === docObj.docid);
+ if (!docPageFlags) {
+ return false;
+ }
+ for (let pageFlag of docPageFlags.pageflag) {
+ if (pageFlag.page === docObj.page && (pageFlag.flagid === pageFlagTypes["Duplicate"] || pageFlag.flagid === pageFlagTypes["Not Responsive"])) {
+ return pageFlag;
+ }
+ }
+}
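A minimal usage sketch of the new findNROrDuplicatePageFlag helper (not part of the patch; the flag ids below are assumptions for illustration, the real values come from pageFlagTypes):

// Illustrative sketch only; 5 and 6 are assumed ids, the real ones come from pageFlagTypes.
const pageFlagTypes = { "Duplicate": 5, "Not Responsive": 6 };
const pageFlags = [
  { documentid: 12, pageflag: [{ page: 3, flagid: 5 }] },
];
// Page 3 already carries a Duplicate flag, so that flag wins over any annotation-driven flag:
console.log(findNROrDuplicatePageFlag(pageFlags, { docid: 12, page: 3 }, pageFlagTypes));
// -> { page: 3, flagid: 5 }
// Page 4 has no NR/Duplicate flag, so the caller falls through to the fullPage/section precedence rules:
console.log(findNROrDuplicatePageFlag(pageFlags, { docid: 12, page: 4 }, pageFlagTypes));
// -> undefined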
diff --git a/web/src/constants/constants.ts b/web/src/constants/constants.ts
index d101b000b..1a76564a0 100644
--- a/web/src/constants/constants.ts
+++ b/web/src/constants/constants.ts
@@ -10,7 +10,7 @@ export const KEYCLOAK_REALM =
export const KEYCLOAK_URL = window._env_?.REACT_APP_KEYCLOAK_URL ?? process.env.REACT_APP_KEYCLOAK_URL ?? "https://dev.oidc.gov.bc.ca";
export const KEYCLOAK_AUTH_URL = `${KEYCLOAK_URL}/auth`;
export const ANONYMOUS_USER = "anonymous";
-export const SESSION_SECURITY_KEY = "u7x!A%D*G-KaNdRgUkXp2s5v8y/B?E(H";
+export const SESSION_SECURITY_KEY = window._env_?.REACT_APP_SESSION_SECURITY_KEY ?? process.env.REACT_APP_SESSION_SECURITY_KEY;
export const SESSION_LIFETIME = 21600000;
export const PDFVIEWER_DISABLED_FEATURES= window._env_?.REACT_APP_PDFVIEWERDISABLED ??
process.env.REACT_APP_PDFVIEWERDISABLED ??
diff --git a/web/src/constants/enum.ts b/web/src/constants/enum.ts
index 2338be921..db5f0f832 100644
--- a/web/src/constants/enum.ts
+++ b/web/src/constants/enum.ts
@@ -100,7 +100,7 @@ const RedactionTypes: RedactionType = {
"blank": "blank"
};
-const MCFPopularSections = 23
+const MCFPopularSections = 21
export {
KCProcessingTeams,
diff --git a/web/src/modules/documentReducer.ts b/web/src/modules/documentReducer.ts
index 302df096f..be98b7a97 100644
--- a/web/src/modules/documentReducer.ts
+++ b/web/src/modules/documentReducer.ts
@@ -8,7 +8,8 @@ const initialState = {
"description": "Redline",
// "sortorder": 1,
// "count": 0
- }
+ },
+ allPublicBodies: [],
}
const documents = (state = initialState, action:any)=> {
@@ -41,6 +42,8 @@ const documents = (state = initialState, action:any)=> {
return {...state, redactionLayers: state.redactionLayers };
case ACTION_CONSTANTS.SET_DELETED_PAGES:
return {...state, deletedDocPages: action.payload};
+ case ACTION_CONSTANTS.SET_PUBLIC_BODIES:
+ return {...state, allPublicBodies: action.payload};
case ACTION_CONSTANTS.FOI_PERSONAL_SECTIONS:
return { ...state, foiPersonalSections: action.payload };
case ACTION_CONSTANTS.FOI_PERSONAL_PEOPLE: