Merge pull request #826 from bcgov/dev

Dev
sumathi-thirumani authored Feb 28, 2024
2 parents 7b31f2c + 62cb840 commit 2cdff9c
Showing 57 changed files with 1,943 additions and 28 deletions.
106 changes: 106 additions & 0 deletions .github/workflows/documentservice-cd.yml
@@ -0,0 +1,106 @@
name: Document Services CD


on:
  push:
    branches:
      - dev
      - main
      - dev-marshal
      - test-marshal
      - dev-rook
      - test-rook
    paths:
      - "computingservices/DocumentServices/**"
      - ".github/workflows/documentservice-cd.yml"

defaults:
  run:
    shell: bash
    working-directory: ./computingservices/DocumentServices

env:
  APP_NAME: "reviewer-documentservice"
  TOOLS_NAME: "${{secrets.OPENSHIFT4_REPOSITORY}}"

jobs:
  documentServices-cd-by-push:
    runs-on: ubuntu-20.04

    if: github.event_name == 'push' && github.repository == 'bcgov/foi-docreviewer'
    steps:
      - uses: actions/checkout@v2
      - name: Set ENV variables for dev branch
        if: ${{ github.ref_name == 'dev' }}
        shell: bash
        run: |
          echo "For ${{ github.ref_name }} branch"
          echo "TAG_NAME=dev" >> $GITHUB_ENV
          echo "BRANCH_NAME=dev" >> $GITHUB_ENV
          echo "ENV_NAME=dev" >> $GITHUB_ENV
      - name: Set ENV variables for main branch
        if: ${{ github.ref_name == 'main' }}
        shell: bash
        run: |
          echo "For ${{ github.ref_name }} branch"
          echo "TAG_NAME=test" >> $GITHUB_ENV
          echo "BRANCH_NAME=main" >> $GITHUB_ENV
          echo "ENV_NAME=test" >> $GITHUB_ENV
      - name: Set ENV variables for dev-marshal branch
        if: ${{ github.ref_name == 'dev-marshal' }}
        run: |
          echo "For ${{ github.ref_name }} branch"
          echo "TAG_NAME=dev-marshal" >> $GITHUB_ENV
          echo "BRANCH_NAME=dev-marshal" >> $GITHUB_ENV
          echo "ENV_NAME=dev" >> $GITHUB_ENV
      - name: Set ENV variables for test-marshal branch
        if: ${{ github.ref_name == 'test-marshal' }}
        run: |
          echo "For ${{ github.ref_name }} branch"
          echo "TAG_NAME=test-marshal" >> $GITHUB_ENV
          echo "BRANCH_NAME=test-marshal" >> $GITHUB_ENV
          echo "ENV_NAME=test" >> $GITHUB_ENV
      - name: Set ENV variables for dev-rook branch
        if: ${{ github.ref_name == 'dev-rook' }}
        run: |
          echo "For ${{ github.ref_name }} branch"
          echo "TAG_NAME=dev-rook" >> $GITHUB_ENV
          echo "BRANCH_NAME=dev-rook" >> $GITHUB_ENV
          echo "ENV_NAME=dev" >> $GITHUB_ENV
      - name: Set ENV variables for test-rook branch
        if: ${{ github.ref_name == 'test-rook' }}
        run: |
          echo "For ${{ github.ref_name }} branch"
          echo "TAG_NAME=test-rook" >> $GITHUB_ENV
          echo "BRANCH_NAME=test-rook" >> $GITHUB_ENV
          echo "ENV_NAME=test" >> $GITHUB_ENV
      - name: Login Openshift
        shell: bash
        run: |
          oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}}
      - name: Tools project
        shell: bash
        run: |
          oc project ${{ env.TOOLS_NAME }}-tools
      - name: Build from ${{ env.BRANCH_NAME }} branch
        shell: bash
        run: |
          oc patch bc/${{ env.APP_NAME }}-build -p '{"spec":{"source":{"contextDir":"/computingservices/DocumentServices","git":{"ref":"${{ env.BRANCH_NAME }}"}}}}'
      - name: Start Build Openshift
        shell: bash
        run: |
          oc start-build ${{ env.APP_NAME }}-build --wait
      - name: Tag+Deploy for ${{ env.TAG_NAME }}
        shell: bash
        run: |
          oc tag ${{ env.APP_NAME }}:latest ${{ env.APP_NAME }}:${{ env.TAG_NAME }}
54 changes: 54 additions & 0 deletions .github/workflows/documentservice-ci.yml
@@ -0,0 +1,54 @@
name: Document Services CI


on:
  pull_request:
    branches:
      - main
      - dev
      - dev-marshal
      - test-marshal
      - dev-rook
      - test-rook
    paths:
      - "computingservices/DocumentServices/**"

defaults:
  run:
    shell: bash
    working-directory: ./computingservices/DocumentServices

jobs:
  docker-build-check:
    runs-on: ubuntu-20.04
    name: Build dockerfile to ensure it works

    steps:
      - uses: actions/checkout@v2
      - name: docker build to check strictness
        id: docker-build
        run: |
          docker build -f Dockerfile.local .
  python-build-check:
    runs-on: ubuntu-20.04
    name: Build python to ensure it works

    strategy:
      matrix:
        # python-version: [3.6, 3.7, 3.8, 3.9]
        python-version: [3.9]

    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
2 changes: 1 addition & 1 deletion api/reviewer_api/resources/foiflowmasterdata.py
@@ -272,7 +272,7 @@ def post(ministryrequestid, redactionlayer="redline", layertype="redline"):
            )
            singlepkgpath = s3path_save
            data["s3path_save"] = s3path_save

            if is_single_redline_package(_bcgovcode, packagetype):
                for div in data["divdocumentList"]:
                    if len(div["documentlist"]) > 0:
10 changes: 10 additions & 0 deletions api/reviewer_api/schemas/finalpackage.py
@@ -10,6 +10,14 @@ class AttributeSchema(Schema):
    files = fields.Nested(FileSchema, many=True, required=True, allow_none=False)


class SummaryPkgSchema(Schema):
    divisionid = fields.Int(data_key="divisionid", allow_none=True)
    documentids = fields.List(fields.Int())

class SummarySchema(Schema):
    pkgdocuments = fields.List(fields.Nested(SummaryPkgSchema, allow_none=True))
    sorteddocuments = fields.List(fields.Int())

class FinalPackageSchema(Schema):
    ministryrequestid = fields.Str(data_key="ministryrequestid", allow_none=False)
    category = fields.Str(data_key="category", allow_none=False)
@@ -18,3 +26,5 @@ class FinalPackageSchema(Schema):
    attributes = fields.Nested(
        AttributeSchema, many=True, required=True, allow_none=False
    )
    summarydocuments = fields.Nested(SummarySchema, allow_none=True)
    redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False)
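
For orientation (not part of this commit): a payload shaped as follows should pass validation against the new summary fields. The field names come from the diff above; the import path mirrors the file header, and the ID values are made up.

# Illustrative sketch only: exercises the SummarySchema added above.
# Assumes marshmallow is installed and the schema module is importable.
from reviewer_api.schemas.finalpackage import SummarySchema

summary_payload = {
    "pkgdocuments": [
        {"divisionid": 4, "documentids": [101, 102]},  # one package per division
        {"divisionid": None, "documentids": [103]},     # divisionid is allow_none=True
    ],
    "sorteddocuments": [101, 102, 103],                 # overall document ordering
}

# load() returns the deserialized dict or raises marshmallow.ValidationError
summary = SummarySchema().load(summary_payload)
print(summary["sorteddocuments"])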
11 changes: 10 additions & 1 deletion api/reviewer_api/schemas/redline.py
@@ -11,6 +11,13 @@ class AttributeSchema(Schema):
    divisionname = fields.Str(data_key="divisionname", allow_none=True)
    divisionid = fields.Int(data_key="divisionid", allow_none=True)

class SummaryPkgSchema(Schema):
    divisionid = fields.Int(data_key="divisionid", allow_none=True)
    documentids = fields.List(fields.Int())

class SummarySchema(Schema):
    pkgdocuments = fields.List(fields.Nested(SummaryPkgSchema, allow_none=True))
    sorteddocuments = fields.List(fields.Int())

class RedlineSchema(Schema):
    ministryrequestid = fields.Str(data_key="ministryrequestid", allow_none=False)
@@ -19,4 +26,6 @@ class RedlineSchema(Schema):
    bcgovcode = fields.Str(data_key="bcgovcode", allow_none=False)
    attributes = fields.Nested(
        AttributeSchema, many=True, required=True, allow_none=False
    )
    )
    summarydocuments = fields.Nested(SummarySchema, allow_none=True)
    redactionlayerid = fields.Int(data_key="redactionlayerid", allow_none=False)
27 changes: 27 additions & 0 deletions api/reviewer_api/services/external/documentserviceproducerservice.py
@@ -0,0 +1,27 @@
import os
from walrus import Database
from reviewer_api.models.default_method_result import DefaultMethodResult
import logging
from os import getenv

class documentserviceproducerservice:
    """This class is reserved for integration with event queue (currently redis streams)."""

    host = os.getenv("DOCUMENTSERVICE_REDIS_HOST")
    port = os.getenv("DOCUMENTSERVICE_REDIS_PORT")
    password = os.getenv("DOCUMENTSERVICE_REDIS_PASSWORD")

    db = Database(host=host, port=port, db=0, password=password)

    def add(self, payload):
        try:
            stream = self.db.Stream(self.__streamkey())
            msgid = stream.add(payload, id="*")
            return DefaultMethodResult(True, "Added to stream", msgid.decode("utf-8"))
        except Exception as err:
            logging.error("Error in contacting Redis Stream")
            logging.error(err)
            return DefaultMethodResult(False, err, -1)

    def __streamkey(self):
        return getenv("DOCUMENTSERVICE_STREAM_KEY")
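
As a point of reference (not part of this commit), a consumer could read back what add() publishes using the same walrus Stream API and environment variables shown above; the loop below is a minimal sketch under that assumption.

# Minimal consumer sketch (illustrative, not in the repository):
# reads messages published by documentserviceproducerservice.add().
import os
from walrus import Database

db = Database(
    host=os.getenv("DOCUMENTSERVICE_REDIS_HOST"),
    port=os.getenv("DOCUMENTSERVICE_REDIS_PORT"),
    db=0,
    password=os.getenv("DOCUMENTSERVICE_REDIS_PASSWORD"),
)
stream = db.Stream(os.getenv("DOCUMENTSERVICE_STREAM_KEY"))

# read() returns (message_id, payload) pairs; last_id=0 starts from the
# beginning of the stream rather than only new entries.
for message_id, payload in stream.read(last_id=0):
    print(message_id.decode("utf-8"), payload)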
8 changes: 6 additions & 2 deletions api/reviewer_api/services/external/zipperproducerservice.py
@@ -2,6 +2,7 @@
from walrus import Database
from reviewer_api.models.default_method_result import DefaultMethodResult
import logging
from os import getenv


class zipperproducerservice:
@@ -13,12 +14,15 @@ class zipperproducerservice:

    db = Database(host=host, port=port, db=0, password=password)

    def add(self, streamkey, payload):
    def add(self, payload):
        try:
            stream = self.db.Stream(streamkey)
            stream = self.db.Stream(self.__streamkey())
            msgid = stream.add(payload, id="*")
            return DefaultMethodResult(True, "Added to stream", msgid.decode("utf-8"))
        except Exception as err:
            logging.error("Error in contacting Redis Stream")
            logging.error(err)
            return DefaultMethodResult(False, err, -1)

    def __streamkey(self):
        return getenv("ZIPPER_STREAM_KEY")
15 changes: 8 additions & 7 deletions api/reviewer_api/services/radactionservice.py
@@ -8,16 +8,15 @@
from reviewer_api.services.annotationservice import annotationservice
from reviewer_api.services.documentpageflagservice import documentpageflagservice
from reviewer_api.services.jobrecordservice import jobrecordservice
from reviewer_api.services.external.zipperproducerservice import zipperproducerservice
from reviewer_api.services.external.documentserviceproducerservice import documentserviceproducerservice

from reviewer_api.utils.util import to_json
from datetime import datetime

import json

class redactionservice:
    """FOI Document management service"""

    zipperstreamkey = getenv("ZIPPER_STREAM_KEY")


    def getannotationsbyrequest(
@@ -193,13 +192,13 @@ def triggerdownloadredlinefinalpackage(self, finalpackageschema, userinfo):
            _jobmessage, userinfo["userid"]
        )
        if job.success:
            _message = self.__preparemessageforzipservice(
            _message = self.__preparemessageforsummaryservice(
                finalpackageschema, userinfo, job
            )
            return zipperproducerservice().add(self.zipperstreamkey, _message)
            return documentserviceproducerservice().add(_message)

    # redline/final package download: prepare message for zipping service
    def __preparemessageforzipservice(self, messageschema, userinfo, job):
    def __preparemessageforsummaryservice(self, messageschema, userinfo, job):
        _message = {
            "jobid": job.identifier,
            "requestid": -1,
@@ -211,8 +210,10 @@ def __preparemessageforzipservice(self, messageschema, userinfo, job):
"filestozip": to_json(
self.__preparefilestozip(messageschema["attributes"])
),
"finaloutput": to_json({}),
"finaloutput": to_json(""),
"attributes": to_json(messageschema["attributes"]),
"summarydocuments": json.dumps(messageschema["summarydocuments"]),
"redactionlayerid": json.dumps(messageschema["redactionlayerid"])
}
return _message

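
One detail worth noting: Redis stream entries are flat string fields, which is why summarydocuments and redactionlayerid are passed through json.dumps above. A downstream consumer would reverse that, roughly as sketched below; the message variable and helper name are hypothetical.

# Hypothetical consumer-side decoding of the fields serialized above.
import json

def parse_summary_fields(message):
    # message is assumed to be the flat dict read back from the stream
    summarydocuments = json.loads(message["summarydocuments"])
    redactionlayerid = json.loads(message["redactionlayerid"])
    pkgdocuments = summarydocuments.get("pkgdocuments", [])
    sorteddocuments = summarydocuments.get("sorteddocuments", [])
    return redactionlayerid, pkgdocuments, sorteddocuments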
2 changes: 2 additions & 0 deletions computingservices/DocumentServices/.gitignore
@@ -0,0 +1,2 @@
__pycache__/*
*.pyc
29 changes: 29 additions & 0 deletions computingservices/DocumentServices/.sampleenv
@@ -0,0 +1,29 @@

#Properties of Document Service - Begin
DOCUMENTSERVICE_REDIS_HOST=
DOCUMENTSERVICE_REDIS_PORT=
DOCUMENTSERVICE_REDIS_PASSWORD=
DOCUMENTSERVICE_STREAM_KEY=DOCUMENTSERVICE

ZIPPER_REDIS_HOST=
ZIPPER_REDIS_PORT=
ZIPPER_REDIS_PASSWORD=
ZIPPER_STREAM_KEY=ZIPPER_STREAM

DOCUMENTSERVICE_DB_HOST=
DOCUMENTSERVICE_DB_NAME=
DOCUMENTSERVICE_DB_PORT=
DOCUMENTSERVICE_DB_USER=
DOCUMENTSERVICE_DB_PASSWORD=

DOCUMENTSERVICE_S3_HOST=
DOCUMENTSERVICE_S3_REGION=
DOCUMENTSERVICE_S3_SERVICE=
DOCUMENTSERVICE_S3_ENV=

FOI_DB_USER=
FOI_DB_PASSWORD=
FOI_DB_NAME=
FOI_DB_HOST=
FOI_DB_PORT=
#Properties of Document Service - End
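
For local development, these values are typically copied into a .env file before the service starts. The snippet below is one way to load them so the os.getenv calls used elsewhere in this commit can see them; python-dotenv is an assumption here, not something this commit mandates.

# Optional local-dev helper (assumption: python-dotenv is installed).
# Loads .env so the DOCUMENTSERVICE_* variables above are visible to os.getenv.
import os
from dotenv import load_dotenv

load_dotenv()  # reads .env from the current working directory

redis_host = os.getenv("DOCUMENTSERVICE_REDIS_HOST")
stream_key = os.getenv("DOCUMENTSERVICE_STREAM_KEY", "DOCUMENTSERVICE")
print(f"stream {stream_key} on {redis_host}")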
18 changes: 18 additions & 0 deletions computingservices/DocumentServices/DockerFile.bcgov
@@ -0,0 +1,18 @@
FROM artifacts.developer.gov.bc.ca/docker-remote/python:3.10.8-buster

# Keeps Python from generating .pyc files in the container
ENV PYTHONDONTWRITEBYTECODE=1

# Turns off buffering for easier container logging
ENV PYTHONUNBUFFERED=1

RUN useradd --create-home --shell /bin/bash app_user
WORKDIR /home/app_user
COPY requirements.txt ./
RUN apt-get update \
&& apt-get -y install libpq-dev gcc \
&& pip install psycopg2
RUN pip install --no-cache-dir -r requirements.txt
USER app_user
COPY . .
ENTRYPOINT ["/bin/sh", "./entrypoint.sh"]
18 changes: 18 additions & 0 deletions computingservices/DocumentServices/Dockerfile.local
@@ -0,0 +1,18 @@
FROM python:3.10.8

# Keeps Python from generating .pyc files in the container
ENV PYTHONDONTWRITEBYTECODE=1

# Turns off buffering for easier container logging
ENV PYTHONUNBUFFERED=1

RUN useradd --create-home --shell /bin/bash app_user
WORKDIR /home/app_user
COPY requirements.txt ./
RUN apt-get update \
&& apt-get -y install libpq-dev gcc \
&& pip install psycopg2
RUN pip install --no-cache-dir -r requirements.txt
USER app_user
COPY . .
ENTRYPOINT ["/bin/sh", "./entrypoint.sh"]
Empty file.
6 changes: 6 additions & 0 deletions computingservices/DocumentServices/__main__.py
@@ -0,0 +1,6 @@
from rstreamio.reader import documentservicestreamreader



if __name__ == '__main__':
    documentservicestreamreader.app()
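
The rstreamio.reader module itself is not included in this diff. Purely as an illustration of the pattern the entry point implies, a blocking reader loop over the document-service stream might look like the following; everything except the environment variable names is an assumption.

# Illustrative sketch only -- not the actual rstreamio.reader implementation.
import os
from walrus import Database

def app():
    db = Database(
        host=os.getenv("DOCUMENTSERVICE_REDIS_HOST"),
        port=os.getenv("DOCUMENTSERVICE_REDIS_PORT"),
        db=0,
        password=os.getenv("DOCUMENTSERVICE_REDIS_PASSWORD"),
    )
    stream = db.Stream(os.getenv("DOCUMENTSERVICE_STREAM_KEY"))
    last_id = 0
    while True:
        # block=0 waits indefinitely for new entries after last_id
        for message_id, payload in stream.read(block=0, last_id=last_id):
            last_id = message_id
            print("received", message_id, payload)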