From 77d3c19343b12cabfee2c70d8bddfbe9c1bf7076 Mon Sep 17 00:00:00 2001 From: Richard Qi Date: Wed, 18 Dec 2024 21:53:21 -0800 Subject: [PATCH] move code to dev-rook branch instead of test-rook/test-marshal --- .github/workflows/openinfoservice-cd.yml | 113 +++++ .github/workflows/openinfoservice-ci.yml | 46 ++ computingservices/OpenInfoServices/.sampleenv | 21 + .../Dockerfile.enqueueforpublish.local | 14 + .../Dockerfile.enqueueforunpublish.local | 14 + .../OpenInfoServices/Dockerfile.local | 14 + .../OpenInfoServices/Dockerfile.sitemap.local | 14 + .../OpenInfoServices/config/config.go | 155 +++++++ computingservices/OpenInfoServices/go.mod | 51 +++ computingservices/OpenInfoServices/go.sum | 102 +++++ .../OpenInfoServices/lib/awslib/s3.go | 427 ++++++++++++++++++ .../OpenInfoServices/lib/db/dbservices.go | 417 +++++++++++++++++ .../OpenInfoServices/lib/files/html.go | 43 ++ .../OpenInfoServices/lib/queue/redis.go | 36 ++ computingservices/OpenInfoServices/main.go | 344 ++++++++++++++ .../OpenInfoServices/main_test.go | 12 + .../services/messagehandler.go | 173 +++++++ .../OpenInfoServices/templates/template.html | 14 + docker-compose.yml | 32 ++ .../openinfoservice-build.yaml | 61 +++ .../openinfoservice-deploy.yaml | 196 ++++++++ ...eninfoservice-enqueueforpublish-build.yaml | 61 +++ ...infoservice-enqueueforunpublish-build.yaml | 61 +++ .../openinfoservice-sitemap-build.yaml | 61 +++ sample.env | 25 +- 25 files changed, 2506 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/openinfoservice-cd.yml create mode 100644 .github/workflows/openinfoservice-ci.yml create mode 100644 computingservices/OpenInfoServices/.sampleenv create mode 100644 computingservices/OpenInfoServices/Dockerfile.enqueueforpublish.local create mode 100644 computingservices/OpenInfoServices/Dockerfile.enqueueforunpublish.local create mode 100644 computingservices/OpenInfoServices/Dockerfile.local create mode 100644 computingservices/OpenInfoServices/Dockerfile.sitemap.local create mode 100644 computingservices/OpenInfoServices/config/config.go create mode 100644 computingservices/OpenInfoServices/go.mod create mode 100644 computingservices/OpenInfoServices/go.sum create mode 100644 computingservices/OpenInfoServices/lib/awslib/s3.go create mode 100644 computingservices/OpenInfoServices/lib/db/dbservices.go create mode 100644 computingservices/OpenInfoServices/lib/files/html.go create mode 100644 computingservices/OpenInfoServices/lib/queue/redis.go create mode 100644 computingservices/OpenInfoServices/main.go create mode 100644 computingservices/OpenInfoServices/main_test.go create mode 100644 computingservices/OpenInfoServices/services/messagehandler.go create mode 100644 computingservices/OpenInfoServices/templates/template.html create mode 100644 openshift/templates/openinfoservice/openinfoservice-build.yaml create mode 100644 openshift/templates/openinfoservice/openinfoservice-deploy.yaml create mode 100644 openshift/templates/openinfoservice/openinfoservice-enqueueforpublish-build.yaml create mode 100644 openshift/templates/openinfoservice/openinfoservice-enqueueforunpublish-build.yaml create mode 100644 openshift/templates/openinfoservice/openinfoservice-sitemap-build.yaml diff --git a/.github/workflows/openinfoservice-cd.yml b/.github/workflows/openinfoservice-cd.yml new file mode 100644 index 000000000..22120134a --- /dev/null +++ b/.github/workflows/openinfoservice-cd.yml @@ -0,0 +1,113 @@ +name: OpenInfo Service CD + + +on: + push: + branches: + - dev + - main + - dev-marshal + - 
test-marshal + - dev-rook + - test-rook + paths: + - "computingservices/OpenInfoServices/**" + - ".github/workflows/openinfoservice-cd.yml" + +defaults: + run: + shell: bash + working-directory: ./computingservices/OpenInfoServices + +env: + APP_NAME: "reviewer-openinfoservice" + TOOLS_NAME: "${{secrets.OPENSHIFT4_REPOSITORY}}" + +jobs: + openinfoservice-cd-by-push: + runs-on: ubuntu-20.04 + + if: github.event_name == 'push' && github.repository == 'bcgov/foi-docreviewer' + steps: + - uses: actions/checkout@v2 + - name: Set ENV variables for dev branch + if: ${{ github.ref_name == 'dev' }} + shell: bash + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=dev" >> $GITHUB_ENV + echo "BRANCH_NAME=dev" >> $GITHUB_ENV + echo "ENV_NAME=dev" >> $GITHUB_ENV + + - name: Set ENV variables for main branch + if: ${{ github.ref_name == 'main' }} + shell: bash + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=test" >> $GITHUB_ENV + echo "BRANCH_NAME=main" >> $GITHUB_ENV + echo "ENV_NAME=test" >> $GITHUB_ENV + + - name: Set ENV variables for dev-marshal branch + if: ${{ github.ref_name == 'dev-marshal' }} + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=dev-marshal" >> $GITHUB_ENV + echo "BRANCH_NAME=dev-marshal" >> $GITHUB_ENV + echo "ENV_NAME=dev" >> $GITHUB_ENV + + - name: Set ENV variables for test-marshal branch + if: ${{ github.ref_name == 'test-marshal' }} + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=test-marshal" >> $GITHUB_ENV + echo "BRANCH_NAME=test-marshal" >> $GITHUB_ENV + echo "ENV_NAME=test" >> $GITHUB_ENV + + - name: Set ENV variables for dev-rook branch + if: ${{ github.ref_name == 'dev-rook' }} + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=dev-rook" >> $GITHUB_ENV + echo "BRANCH_NAME=dev-rook" >> $GITHUB_ENV + echo "ENV_NAME=dev" >> $GITHUB_ENV + echo "ENV_TAG_NAME=rook" >> $GITHUB_ENV + + - name: Set ENV variables for test-rook branch + if: ${{ github.ref_name == 'test-rook' }} + run: | + echo "For ${{ github.ref_name }} branch" + echo "TAG_NAME=test-rook" >> $GITHUB_ENV + echo "BRANCH_NAME=test-rook" >> $GITHUB_ENV + echo "ENV_NAME=test" >> $GITHUB_ENV + echo "ENV_TAG_NAME=rook" >> $GITHUB_ENV + + - name: Login Openshift + shell: bash + run: | + oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} + + - name: Tools project + shell: bash + run: | + oc project ${{ env.TOOLS_NAME }}-tools + + - name: Build from ${{ env.BRANCH_NAME }} branch + shell: bash + run: | + oc patch bc/${{ env.APP_NAME }}-build -p '{"spec":{"source":{"contextDir":"/computingservices/OpenInfoServices","git":{"ref":"${{ env.BRANCH_NAME }}"}}}}' + + - name: Start Build Openshift + shell: bash + run: | + oc start-build ${{ env.APP_NAME }}-build --wait + + - name: Tag+Deploy for ${{ env.TAG_NAME }} + shell: bash + run: | + oc tag ${{ env.APP_NAME }}:latest ${{ env.APP_NAME }}:${{ env.TAG_NAME }} + + # - name: Watch new rollout (trigger by image change in Openshift) + # shell: bash + # run: | + # oc rollout status dc/${{ env.APP_NAME }}-{{ env.ENV_TAG_NAME }} -n ${{ env.TOOLS_NAME }}-${{ env.ENV_NAME }} -w diff --git a/.github/workflows/openinfoservice-ci.yml b/.github/workflows/openinfoservice-ci.yml new file mode 100644 index 000000000..aeb663ea7 --- /dev/null +++ b/.github/workflows/openinfoservice-ci.yml @@ -0,0 +1,46 @@ +name: Open Info Service CI + + +on: + pull_request: + branches: + - main + - dev + - dev-marshal + - test-marshal + - dev-rook + - test-rook + paths: 
+ - "computingservices/OpenInfoServices/**" + +defaults: + run: + shell: bash + working-directory: ./computingservices/OpenInfoServices + +jobs: + docker-build-check: + runs-on: ubuntu-latest + name: Build dockerfile to ensure it works + + steps: + - uses: actions/checkout@v2 + - name: docker build to check strictness + id: docker-build + run: | + docker build -f Dockerfile.local . + + go-build-check: + runs-on: ubuntu-latest + name: Build go to ensure it works + + steps: + - uses: actions/checkout@v3 + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version: '1.23' + - name: Build the application + run: go build -v . + - name: Test the application + run: go test ./... diff --git a/computingservices/OpenInfoServices/.sampleenv b/computingservices/OpenInfoServices/.sampleenv new file mode 100644 index 000000000..423b29f80 --- /dev/null +++ b/computingservices/OpenInfoServices/.sampleenv @@ -0,0 +1,21 @@ +FOI_DB_USER= +FOI_DB_PASSWORD= +FOI_DB_NAME= +FOI_DB_HOST= +FOI_DB_PORT= + +OI_REDIS_HOST= +OI_REDIS_PORT= +OI_REDIS_PASSWORD= +OI_QUEUE_NAME=OpenInfoQueue + +OI_S3_ENV= +OI_S3_HOST= +OI_S3_REGION=us-east-1 +OI_S3_BUCKET=openinfopub +OI_ACCESS_KEY= +OI_SECRET_KEY= + +OI_PREFIX=packages/ +SITEMAP_PREFIX=sitemap/ +SITEMAP_PAGES_LIMIT=5000 \ No newline at end of file diff --git a/computingservices/OpenInfoServices/Dockerfile.enqueueforpublish.local b/computingservices/OpenInfoServices/Dockerfile.enqueueforpublish.local new file mode 100644 index 000000000..4fd4f50fb --- /dev/null +++ b/computingservices/OpenInfoServices/Dockerfile.enqueueforpublish.local @@ -0,0 +1,14 @@ +FROM golang:1.23-alpine AS builder +WORKDIR /app +COPY go.mod go.sum ./ +RUN go mod download +COPY . . +RUN go build -o main . + +# Use a minimal base image to run the application +FROM alpine:latest +WORKDIR /app +COPY --from=builder /app/main . +COPY --from=builder /app/templates/ ./templates/. +ENTRYPOINT ["./main"] +CMD ["enqueueforpublish"] diff --git a/computingservices/OpenInfoServices/Dockerfile.enqueueforunpublish.local b/computingservices/OpenInfoServices/Dockerfile.enqueueforunpublish.local new file mode 100644 index 000000000..e434c37fc --- /dev/null +++ b/computingservices/OpenInfoServices/Dockerfile.enqueueforunpublish.local @@ -0,0 +1,14 @@ +FROM golang:1.23-alpine AS builder +WORKDIR /app +COPY go.mod go.sum ./ +RUN go mod download +COPY . . +RUN go build -o main . + +# Use a minimal base image to run the application +FROM alpine:latest +WORKDIR /app +COPY --from=builder /app/main . +COPY --from=builder /app/templates/ ./templates/. +ENTRYPOINT ["./main"] +CMD ["enqueueforunpublish"] diff --git a/computingservices/OpenInfoServices/Dockerfile.local b/computingservices/OpenInfoServices/Dockerfile.local new file mode 100644 index 000000000..820d739ac --- /dev/null +++ b/computingservices/OpenInfoServices/Dockerfile.local @@ -0,0 +1,14 @@ +FROM golang:1.23-alpine AS builder +WORKDIR /app +COPY go.mod go.sum ./ +RUN go mod download +COPY . . +RUN go build -o main . + +# Use a minimal base image to run the application +FROM alpine:latest +WORKDIR /app +COPY --from=builder /app/main . +COPY --from=builder /app/templates/ ./templates/. 
+ENTRYPOINT ["./main"] +CMD ["dequeue"] diff --git a/computingservices/OpenInfoServices/Dockerfile.sitemap.local b/computingservices/OpenInfoServices/Dockerfile.sitemap.local new file mode 100644 index 000000000..1a6192156 --- /dev/null +++ b/computingservices/OpenInfoServices/Dockerfile.sitemap.local @@ -0,0 +1,14 @@ +FROM golang:1.23-alpine AS builder +WORKDIR /app +COPY go.mod go.sum ./ +RUN go mod download +COPY . . +RUN go build -o main . + +# Use a minimal base image to run the application +FROM alpine:latest +WORKDIR /app +COPY --from=builder /app/main . +COPY --from=builder /app/templates/ ./templates/. +ENTRYPOINT ["./main"] +CMD ["sitemap"] diff --git a/computingservices/OpenInfoServices/config/config.go b/computingservices/OpenInfoServices/config/config.go new file mode 100644 index 000000000..58b555a22 --- /dev/null +++ b/computingservices/OpenInfoServices/config/config.go @@ -0,0 +1,155 @@ +package myconfig + +import ( + "log" + "os" + "strconv" + "sync" + + "github.com/spf13/viper" +) + +var ( + //DB + host string + port string + user string + password string + dbname string + + //Redis + queue string + queuehost string + queueport string + queuepassword string + + //S3 + s3url string + oibucket string + oiprefix string + sitemapprefix string + sitemaplimit int + region string + accessKey string + secretKey string + s3host string + + env string + + onceDB sync.Once + onceRedis sync.Once + onceS3 sync.Once + onceS3Path sync.Once + onceOthers sync.Once +) + +// use viper package to read .env file +// return the value of the key +func viperEnvVariable(key string) string { + + // SetConfigFile explicitly defines the path, name and extension of the config file. + // Viper will use this and not check any of the config paths. + // .env - It will search for the .env file in given path + viper.SetConfigFile(getEnv("ENVFILE_PATH")) + + // Find and read the config file + err := viper.ReadInConfig() + + if err != nil { + log.Fatalf("Error while reading config file %s", err) + } + + // viper.Get() returns an empty interface{} + // to get the underlying type of the key, + // we have to do the type assertion, we know the underlying value is string + // if we type assert to other type it will throw an error + value, ok := viper.Get(key).(string) + + // If the type is a string then ok will be true + // ok will make sure the program not break + if !ok { + log.Fatalf("Invalid type assertion") + } + + return value +} + +// Lazy initialization functions +func loadConfigDB() { + host = getEnv("FOI_DB_HOST") + port = getEnv("FOI_DB_PORT") + user = getEnv("FOI_DB_USER") + password = getEnv("FOI_DB_PASSWORD") + dbname = getEnv("FOI_DB_NAME") +} + +func loadConfigRedis() { + queuehost = getEnv("OI_REDIS_HOST") + queueport = getEnv("OI_REDIS_PORT") + queuepassword = getEnv("OI_REDIS_PASSWORD") +} + +func loadConfigS3() { + region = getEnv("OI_S3_REGION") + accessKey = getEnv("OI_ACCESS_KEY") + secretKey = getEnv("OI_SECRET_KEY") + s3host = getEnv("OI_S3_HOST") +} + +func loadConfigS3Path() { + s3url = "https://" + getEnv("OI_S3_HOST") + "/" + oibucket = getEnv("OI_S3_BUCKET") + oiprefix = getEnv("OI_PREFIX") + sitemapprefix = getEnv("SITEMAP_PREFIX") + + var strerr error + sitemaplimit, strerr = strconv.Atoi(getEnv("SITEMAP_PAGES_LIMIT")) + if strerr != nil { + log.Printf("Error converting string to int for SITEMAP_PAGES_LIMIT, will use default value: %v", strerr) + sitemaplimit = 5000 + } +} + +func loadConfigOther() { + env = getEnv("OI_S3_ENV") + queue = getEnv("OI_QUEUE_NAME") +} + +// Helper 
function to get environment variables +func getEnv(key string) string { + value, exists := os.LookupEnv(key) + if !exists { + return viperEnvVariable(key) + } + return value +} + +// GetDB retrieves the database variables with lazy initialization +func GetDB() (string, string, string, string, string) { + onceDB.Do(loadConfigDB) // Ensures loadConfig is called only once + return host, port, user, password, dbname +} + +// GetRedis retrieves the redis variables with lazy initialization +func GetRedis() (string, string, string) { + onceRedis.Do(loadConfigRedis) // Ensures loadConfig is called only once + return queuehost, queueport, queuepassword +} + +// GetS3 retrieves the S3 variables with lazy initialization +func GetS3() (string, string, string, string) { + onceS3.Do(loadConfigS3) // Ensures loadConfig is called only once + return region, accessKey, secretKey, s3host +} + +// GetS3 retrieves the S3 variables with lazy initialization +func GetS3Path() (string, string, string, string, int) { + onceS3Path.Do(loadConfigS3Path) // Ensures loadConfig is called only once + return s3url, oibucket, oiprefix, sitemapprefix, sitemaplimit +} + +// GetS3 retrieves the S3 variables with lazy initialization +func GetOthers() (string, string) { + onceOthers.Do(loadConfigOther) // Ensures loadConfig is called only once + return env, queue +} diff --git a/computingservices/OpenInfoServices/go.mod b/computingservices/OpenInfoServices/go.mod new file mode 100644 index 000000000..f3d6bb8aa --- /dev/null +++ b/computingservices/OpenInfoServices/go.mod @@ -0,0 +1,51 @@ +module OpenInfoServices + +go 1.23.2 + +require ( + github.com/aws/aws-sdk-go-v2 v1.32.4 + github.com/aws/aws-sdk-go-v2/config v1.28.4 + github.com/aws/aws-sdk-go-v2/credentials v1.17.45 + github.com/aws/aws-sdk-go-v2/service/s3 v1.67.0 + github.com/aws/smithy-go v1.22.1 + github.com/lib/pq v1.10.9 + github.com/redis/go-redis/v9 v9.7.0 +) + +require ( + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.19 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.23 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.4 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.24.5 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.4 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.33.0 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/fsnotify/fsnotify v1.7.0 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/pelletier/go-toml/v2 v2.2.2 // indirect + github.com/sagikazarmark/locafero v0.4.0 // indirect + github.com/sagikazarmark/slog-shim v0.1.0 // indirect + github.com/sourcegraph/conc v0.3.0 // indirect + github.com/spf13/afero v1.11.0 // indirect + github.com/spf13/cast v1.6.0 // indirect + github.com/spf13/pflag v1.0.5 // indirect + 
github.com/spf13/viper v1.19.0 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + go.uber.org/atomic v1.9.0 // indirect + go.uber.org/multierr v1.9.0 // indirect + golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect + golang.org/x/sys v0.18.0 // indirect + golang.org/x/text v0.14.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/computingservices/OpenInfoServices/go.sum b/computingservices/OpenInfoServices/go.sum new file mode 100644 index 000000000..2dc1f8c7b --- /dev/null +++ b/computingservices/OpenInfoServices/go.sum @@ -0,0 +1,102 @@ +github.com/aws/aws-sdk-go-v2 v1.32.4 h1:S13INUiTxgrPueTmrm5DZ+MiAo99zYzHEFh1UNkOxNE= +github.com/aws/aws-sdk-go-v2 v1.32.4/go.mod h1:2SK5n0a2karNTv5tbP1SjsX0uhttou00v/HpXKM1ZUo= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 h1:pT3hpW0cOHRJx8Y0DfJUEQuqPild8jRGmSFmBgvydr0= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6/go.mod h1:j/I2++U0xX+cr44QjHay4Cvxj6FUbnxrgmqN3H1jTZA= +github.com/aws/aws-sdk-go-v2/config v1.28.4 h1:qgD0MKmkIzZR2DrAjWJcI9UkndjR+8f6sjUQvXh0mb0= +github.com/aws/aws-sdk-go-v2/config v1.28.4/go.mod h1:LgnWnNzHZw4MLplSyEGia0WgJ/kCGD86zGCjvNpehJs= +github.com/aws/aws-sdk-go-v2/credentials v1.17.45 h1:DUgm5lFso57E7150RBgu1JpVQoF8fAPretiDStIuVjg= +github.com/aws/aws-sdk-go-v2/credentials v1.17.45/go.mod h1:dnBpENcPC1ekZrGpSWspX+ZRGzhkvqngT2Qp5xBR1dY= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.19 h1:woXadbf0c7enQ2UGCi8gW/WuKmE0xIzxBF/eD94jMKQ= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.19/go.mod h1:zminj5ucw7w0r65bP6nhyOd3xL6veAUMc3ElGMoLVb4= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23 h1:A2w6m6Tmr+BNXjDsr7M90zkWjsu4JXHwrzPg235STs4= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23/go.mod h1:35EVp9wyeANdujZruvHiQUAo9E3vbhnIO1mTCAxMlY0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23 h1:pgYW9FCabt2M25MoHYCfMrVY2ghiiBKYWUVXfwZs+sU= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23/go.mod h1:c48kLgzO19wAu3CPkDWC28JbaJ+hfQlsdl7I2+oqIbk= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.23 h1:1SZBDiRzzs3sNhOMVApyWPduWYGAX0imGy06XiBnCAM= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.23/go.mod h1:i9TkxgbZmHVh2S0La6CAXtnyFhlCX/pJ0JsOvBAS6Mk= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 h1:TToQNkvGguu209puTojY/ozlqy2d/SFNcoLIqTFi42g= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0/go.mod h1:0jp+ltwkf+SwG2fm/PKo8t4y8pJSgOCO4D8Lz3k0aHQ= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.4 h1:aaPpoG15S2qHkWm4KlEyF01zovK1nW4BBbyXuHNSE90= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.4/go.mod h1:eD9gS2EARTKgGr/W5xwgY/ik9z/zqpW+m/xOQbVxrMk= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.4 h1:tHxQi/XHPK0ctd/wdOw0t7Xrc2OxcRCnVzv8lwWPu0c= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.4/go.mod h1:4GQbF1vJzG60poZqWatZlhP31y8PGCCVTvIGPdaaYJ0= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.4 h1:E5ZAVOmI2apR8ADb72Q63KqwwwdW1XcMeXIlrZ1Psjg= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.4/go.mod h1:wezzqVUOVVdk+2Z/JzQT4NxAU0NbhRe5W8pIE72jsWI= +github.com/aws/aws-sdk-go-v2/service/s3 v1.67.0 h1:SwaJ0w0MOp0pBTIKTamLVeTKD+iOWyNJRdJ2KCQRg6Q= 
+github.com/aws/aws-sdk-go-v2/service/s3 v1.67.0/go.mod h1:TMhLIyRIyoGVlaEMAt+ITMbwskSTpcGsCPDq91/ihY0= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.5 h1:HJwZwRt2Z2Tdec+m+fPjvdmkq2s9Ra+VR0hjF7V2o40= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.5/go.mod h1:wrMCEwjFPms+V86TCQQeOxQF/If4vT44FGIOFiMC2ck= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.4 h1:zcx9LiGWZ6i6pjdcoE9oXAB6mUdeyC36Ia/QEiIvYdg= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.4/go.mod h1:Tp/ly1cTjRLGBBmNccFumbZ8oqpZlpdhFf80SrRh4is= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.0 h1:s7LRgBqhwLaxcocnAniBJp7gaAB+4I4vHzqUqjH18yc= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.0/go.mod h1:9XEUty5v5UAsMiFOBJrNibZgwCeOma73jgGwwhgffa8= +github.com/aws/smithy-go v1.22.1 h1:/HPHZQ0g7f4eUeK6HKglFz8uwVfZKgoI25rb/J+dnro= +github.com/aws/smithy-go v1.22.1/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= +github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= +github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E= +github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw= +github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= +github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= +github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= +github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= 
+github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= +github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= +github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= +github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= +github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= +github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= +github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= +github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= +go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= +go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI= +go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= +golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= +golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= +golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/computingservices/OpenInfoServices/lib/awslib/s3.go b/computingservices/OpenInfoServices/lib/awslib/s3.go new file mode 100644 index 000000000..68dcbebf5 --- /dev/null +++ b/computingservices/OpenInfoServices/lib/awslib/s3.go @@ -0,0 +1,427 @@ +package awslib + +import ( + myconfig "OpenInfoServices/config" + "OpenInfoServices/lib/files" + "bytes" + "context" 
+ "encoding/xml" + "fmt" + "log" + "net/url" + "path" + "path/filepath" + "strings" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/credentials" + "github.com/aws/aws-sdk-go-v2/service/s3" + smithyendpoints "github.com/aws/smithy-go/endpoints" +) + +var ( + //S3 + s3url string + oibucket string + oiprefix string + sitemapprefix string + sitemaplimit int + region string + accessKey string + secretKey string + s3host string +) + +type ScanResult struct { + LetterNames string + LetterSizes string + FileNames string + FileSizes string + Links []files.Link +} + +type SiteMap struct { + XMLName xml.Name `xml:"sitemap"` + Loc string `xml:"loc"` + LastMod string `xml:"lastmod"` +} + +type SitemapIndex struct { + XMLName xml.Name `xml:"sitemapindex"` + Sitemaps []SiteMap `xml:"sitemap"` +} + +type Url struct { + XMLName xml.Name `xml:"url"` + Loc string `xml:"loc"` + LastMod string `xml:"lastmod"` +} + +type UrlSet struct { + XMLName xml.Name `xml:"urlset"` + Urls []Url `xml:"url"` +} + +type AdditionalFile struct { + Additionalfileid int `json:"additionalfileid"` + Filename string `json:"filename"` + S3uripath string `json:"s3uripath"` +} + +type resolverV2 struct{} + +func (*resolverV2) ResolveEndpoint(ctx context.Context, params s3.EndpointParameters) ( + smithyendpoints.Endpoint, error, +) { + // s3.Options.BaseEndpoint is accessible here: + // fmt.Printf("The endpoint provided in config is %s\n", *params.Endpoint) + + // fallback to default + return s3.NewDefaultEndpointResolverV2().ResolveEndpoint(ctx, params) +} + +// CreateS3Client creates and returns an S3 client +func CreateS3Client() *s3.Client { + + region, accessKey, secretKey, s3host := myconfig.GetS3() + + // Replace with your custom endpoint + customEndpoint := "https://" + s3host + "/" + + // Load the AWS configuration with credentials + cfg, err := config.LoadDefaultConfig(context.TODO(), + config.WithRegion(region), + config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(accessKey, secretKey, "")), + ) + if err != nil { + log.Fatalf("unable to load SDK config, %v", err) + } + + // Create an S3 service client + return s3.NewFromConfig(cfg, func(o *s3.Options) { + o.BaseEndpoint = aws.String(customEndpoint) + o.EndpointResolverV2 = &resolverV2{} + o.UsePathStyle = true + }) +} + +func ScanS3(openInfoBucket string, openInfoPrefix string, urlPrefix string, filemappings []AdditionalFile) (ScanResult, error) { + bucket := openInfoBucket //"dev-openinfopub" + prefix := openInfoPrefix //"poc/packages/HSG_2024_40515/" // Folder prefix in the bucket + + svc := CreateS3Client() + + // List objects in the bucket folder + resp, errors := svc.ListObjectsV2(context.TODO(), &s3.ListObjectsV2Input{ + Bucket: aws.String(bucket), + Prefix: aws.String(prefix), + }) + if errors != nil { + log.Fatalf("unable to list items in bucket %q, %v", bucket, errors) + } + + var Result ScanResult + var filePath string = "" + var matched bool + var fileType string = "unknown" + var err error + var letterLinks []files.Link + var fileLinks []files.Link + + supportedFileTypes := []string{".pdf", ".xls", ".xlsx"} + + // Print file information + for _, item := range resp.Contents { + + filePath = *item.Key + // fmt.Printf("*item.Key %v\n", *item.Key) + + // Get the file name + base := path.Base(filePath) + originalFileName, found := getOriginalName(filemappings, base) + if found { + base = originalFileName + } + // fmt.Printf("Base %s\n", base) + // fmt.Printf("Name: %s, Size: %d 
bytes\n", filePath, *item.Size) + + // Find response letters + patternResponseLetter := "Response_Letter_*.pdf" + matched, err = filepath.Match(patternResponseLetter, base) + if err != nil { + log.Fatalf("error matching pattern, %v", err) + } + + if matched { + letterLinks = append(letterLinks, files.Link{FileName: base, URL: urlPrefix + base}) + if Result.LetterNames == "" { + Result.LetterNames = base + Result.LetterSizes = fmt.Sprintf("%.2f", (float64(*item.Size) / (1024 * 1024))) + } else { + Result.LetterNames = Result.LetterNames + "," + base + Result.LetterSizes = Result.LetterSizes + "," + fmt.Sprintf("%.2f", (float64(*item.Size)/(1024*1024))) + } + } else { + // Other files + fileType = getFileType(*item.Key) + if contains(supportedFileTypes, strings.ToLower(fileType)) { + fileLinks = append(fileLinks, files.Link{FileName: base, URL: urlPrefix + base}) + if Result.FileNames == "" { + Result.FileNames = base + Result.FileSizes = fmt.Sprintf("%.2f", (float64(*item.Size) / (1024 * 1024))) + } else { + Result.FileNames = Result.FileNames + "," + base + Result.FileSizes = Result.FileSizes + "," + fmt.Sprintf("%.2f", (float64(*item.Size)/(1024*1024))) + } + } + } + Result.Links = append(letterLinks, fileLinks...) + } + + // fmt.Printf("Combined letter names: %s, letter size: %s mb\n", Result.LetterNames, Result.LetterSizes) + // fmt.Printf("Combined file names: %s, file size: %s mb\n", Result.FileNames, Result.FileSizes) + return Result, errors +} + +func CopyS3(sourceBucket string, sourcePrefix string, filemappings []AdditionalFile) { + s3url, oibucket, oiprefix, sitemapprefix, sitemaplimit = myconfig.GetS3Path() + env, _ := myconfig.GetOthers() + + // bucket := "dev-openinfopub" + bucket := sourceBucket + prefix := sourcePrefix + destBucket := env + "-" + oibucket + destPrefix := oiprefix + + svc := CreateS3Client() + + // List objects in the bucket folder + resp, err := svc.ListObjectsV2(context.TODO(), &s3.ListObjectsV2Input{ + Bucket: aws.String(bucket), + Prefix: aws.String(prefix), + }) + if err != nil { + log.Fatalf("unable to list items in bucket %q, %v", bucket, err) + } + + // Copy each object to the destination bucket + for _, item := range resp.Contents { + sourceKey := *item.Key + + // Get the file name + base := path.Base(sourceKey) + originalFileName, found := getOriginalName(filemappings, base) + if found { + base = originalFileName + } + // fmt.Printf("Base %s\n", base) + // fmt.Printf("Name: %s, Size: %d bytes\n", filePath, *item.Size) + + // destKey := destPrefix + sourceKey[len(prefix):] + destKey := destPrefix + strings.ReplaceAll(sourceKey, path.Base(sourceKey), base) + + _, err := svc.CopyObject(context.TODO(), &s3.CopyObjectInput{ + Bucket: aws.String(destBucket), + CopySource: aws.String(bucket + "/" + sourceKey), + Key: aws.String(destKey), + }) + if err != nil { + log.Fatalf("unable to copy item %q, %v", sourceKey, err) + } + + fmt.Printf("Copied %s to %s\n", sourceKey, destKey) + } + + fmt.Println("All files copied successfully!") +} + +func SaveFileS3(openInfoBucket string, openInfoPrefix string, filename string, buf []byte) error { + bucket := openInfoBucket //"dev-openinfopub" + prefix := openInfoPrefix //"poc/packages/HSG_2024_40515/" // Folder prefix in the bucket + + svc := CreateS3Client() + + // Upload the HTML content to S3 + _, err := svc.PutObject(context.TODO(), &s3.PutObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(prefix + filename), + Body: bytes.NewReader(buf), + }) + if err != nil { + fmt.Println("Error uploading file:", err) + 
} else { + fmt.Println("File uploaded successfully!") + } + + return err +} + +func GetFileFromS3(openInfoBucket string, openInfoPrefix string, filename string) *s3.GetObjectOutput { + bucket := openInfoBucket //"dev-openinfopub" + prefix := openInfoPrefix //"poc/packages/HSG_2024_40515/" // Folder prefix in the bucket + + svc := CreateS3Client() + + // Download the XML file from S3 + result, err := svc.GetObject(context.TODO(), &s3.GetObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(prefix + filename), + }) + if err != nil { + panic(err) + } + // defer result.Body.Close() + + return result +} + +func ReadSiteMapIndexS3(openInfoBucket string, openInfoPrefix string, filename string) SitemapIndex { + result := GetFileFromS3(openInfoBucket, openInfoPrefix, filename) + defer result.Body.Close() + + // Parse the XML content + var sitemapindex SitemapIndex + err := xml.NewDecoder(result.Body).Decode(&sitemapindex) + if err != nil { + panic(err) + } + + return sitemapindex +} + +func ReadSiteMapPageS3(openInfoBucket string, openInfoPrefix string, filename string) UrlSet { + result := GetFileFromS3(openInfoBucket, openInfoPrefix, filename) + defer result.Body.Close() + + // Parse the XML content + var urlset UrlSet + err := xml.NewDecoder(result.Body).Decode(&urlset) + if err != nil { + panic(err) + } + + return urlset +} + +func SaveSiteMapIndexS3(openInfoBucket string, openInfoPrefix string, filename string, updatedsitemapindex SitemapIndex) error { + // Serialize the updated XML + updatedXML, err := xml.MarshalIndent(updatedsitemapindex, "", " ") + if err != nil { + panic(err) + } + + // Add the XML header and xmlns attribute + xmlHeader := []byte(xml.Header) + xmlns := []byte(` xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"`) + updatedXML = append(xmlHeader, updatedXML...) + updatedXML = bytes.Replace(updatedXML, []byte(""), []byte(""), 1) + + return SaveFileS3(openInfoBucket, openInfoPrefix, filename, updatedXML) +} + +func SaveSiteMapPageS3(openInfoBucket string, openInfoPrefix string, filename string, updatedurlset UrlSet) error { + // Serialize the updated XML + updatedXML, err := xml.MarshalIndent(updatedurlset, "", " ") + if err != nil { + panic(err) + } + + // Add the XML header and xmlns attribute + xmlHeader := []byte(xml.Header) + xmlns := []byte(` xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"`) + updatedXML = append(xmlHeader, updatedXML...) 
+ updatedXML = bytes.Replace(updatedXML, []byte(""), []byte(""), 1) + + return SaveFileS3(openInfoBucket, openInfoPrefix, filename, updatedXML) +} + +func RemoveFromS3(openInfoBucket string, openInfoPrefix string) error { + + svc := CreateS3Client() + + // Upload the HTML content to S3 + _, err := svc.DeleteObject(context.TODO(), &s3.DeleteObjectInput{ + Bucket: aws.String(openInfoBucket), + Key: aws.String(openInfoPrefix), + }) + if err != nil { + fmt.Println("Error removing file:", err) + } else { + fmt.Println("File removed successfully!") + } + + return err +} + +func RemoveFolderFromS3(openInfoBucket string, openInfoPrefix string) error { + + svc := CreateS3Client() + + // List objects in the folder + resp, err := svc.ListObjectsV2(context.TODO(), &s3.ListObjectsV2Input{ + Bucket: aws.String(openInfoBucket), + Prefix: aws.String(openInfoPrefix), + }) + if err != nil { + return fmt.Errorf("unable to list items in bucket %q, %w", openInfoBucket, err) + } + + // Delete each object in the folder + for _, item := range resp.Contents { + _, err := svc.DeleteObject(context.TODO(), &s3.DeleteObjectInput{ + Bucket: aws.String(openInfoBucket), + Key: aws.String(*item.Key), + }) + if err != nil { + log.Fatalf("unable to delete item %q, %v", *item.Key, err) + } + fmt.Printf("Deleted %s\n", *item.Key) + } + + fmt.Println("Folder and its contents deleted successfully!") + return err +} + +func getFileType(filePath string) string { + // Get the file extension + ext := filepath.Ext(filePath) + if ext == "" { + return "unknown" + } + + return ext +} + +// Function to check if a string array contains a specific string +func contains(arr []string, str string) bool { + for _, v := range arr { + if v == str { + return true + } + } + return false +} + +// Get Original Filename +func getOriginalName(filemappings []AdditionalFile, key string) (string, bool) { + for _, item := range filemappings { + // Parse the URL + parsedURL, err := url.Parse(item.S3uripath) + if err != nil { + fmt.Printf("Error parsing URL: %v\n", err) + continue + } + + // Extract the path + urlPath := parsedURL.Path + + // Get the base (last segment) of the path + base := path.Base(urlPath) + + if base == key { + return item.Filename, true + } + } + return "", false +} diff --git a/computingservices/OpenInfoServices/lib/db/dbservices.go b/computingservices/OpenInfoServices/lib/db/dbservices.go new file mode 100644 index 000000000..5c432d2aa --- /dev/null +++ b/computingservices/OpenInfoServices/lib/db/dbservices.go @@ -0,0 +1,417 @@ +package dbservice + +import ( + "database/sql" + "fmt" + "log" + "time" + + _ "github.com/lib/pq" +) + +const ( + dateformat = "2006-01-02" + openstate_ready = "Ready to Publish" + openstate_published = "Published" + openstate_unpublish = "Unpublish" + oirequesttype_publish = "Publish" + oirequesttype_unpublish = "Unpublish Request" + openstatus_ready = "ready" + openstatus_unpublish = "unpublished" +) + +type AdditionalFile struct { + Additionalfileid int + Filename string + S3uripath string +} + +type OpenInfoRecord struct { + Openinfoid int + Foiministryrequestid int + Axisrequestid string + Description string + Published_date string + Contributor string + Applicant_type string + Fees float64 + BCgovcode string + Sitemap_pages string + Type string + Additionalfiles []AdditionalFile +} + +func Conn(dsn string) (*sql.DB, error) { + db, err := sql.Open("postgres", dsn) + if err != nil { + return nil, fmt.Errorf("failed to connect to database: %w", err) + } + + err = db.Ping() + if err != nil { + db.Close() 
+ return nil, fmt.Errorf("failed to ping database: %w", err) + } + + // fmt.Println("Successfully connected!") + return db, nil +} + +func UpdateOIRecordStatus(db *sql.DB, foiministryrequestid int, publishingstatus string, message string) error { + + // Begin a transaction + tx, err := db.Begin() + if err != nil { + log.Fatalf("Error beginning transaction: %v", err) + } + + // Step 1: Set previous versions' isactive to false + _, err = tx.Exec(`UPDATE public."FOIOpenInformationRequests" SET isactive = false, updated_at = $2, updatedby = 'publishingservice' WHERE foiministryrequest_id = $1 AND isactive = true`, foiministryrequestid, time.Now()) + if err != nil { + tx.Rollback() + log.Fatalf("Error updating previous versions: %v", err) + } + + // Step 2: Insert a new version of the record + _, err = tx.Exec(` + INSERT INTO public."FOIOpenInformationRequests" (version, foiministryrequest_id, foiministryrequestversion_id, oipublicationstatus_id, oiexemption_id, oiassignedto, oiexemptionapproved, pagereference, iaorationale, oifeedback, publicationdate, isactive, copyrightsevered, created_at, updated_at, createdby, updatedby, processingstatus, processingmessage, sitemap_pages) + SELECT version + 1, foiministryrequest_id, foiministryrequestversion_id, oipublicationstatus_id, oiexemption_id, oiassignedto, oiexemptionapproved, pagereference, iaorationale, oifeedback, publicationdate, true, copyrightsevered, $2, NULL, 'publishingservice', 'publishingservice', $3, $4, sitemap_pages + FROM public."FOIOpenInformationRequests" + WHERE foiministryrequest_id = $1 AND isactive = false + ORDER BY version DESC + LIMIT 1 + `, foiministryrequestid, time.Now(), publishingstatus, message) + if err != nil { + tx.Rollback() + log.Fatalf("Error inserting new version for status: %v", err) + } + + // Commit the transaction + err = tx.Commit() + if err != nil { + log.Fatalf("Error committing transaction: %v", err) + } + + return err +} + +func GetOIRecordsForPrePublishing(db *sql.DB) ([]OpenInfoRecord, error) { + var records []OpenInfoRecord + // var record OpenInfoRecord + + // Get the current time + now := time.Now() + // Add 24 hours to the current time + tomorrow := now.Add(24 * time.Hour) + + qry := fmt.Sprintf(` + SELECT + oi.foiopeninforequestid, + mr.foiministryrequestid, + mr.axisrequestid, + mr.description, + oi.publicationdate, + pa.name as contributor, + ac.name as applicant_type, + COALESCE((fee.feedata->>'amountpaid')::Numeric, 0) as fees, + LOWER(pa.bcgovcode), + COALESCE(oi.sitemap_pages, '') as sitemap_pages, + 'publish' as type, + oifiles.additionalfileid, + oifiles.filename, + oifiles.s3uripath + FROM public."FOIMinistryRequests" mr + INNER JOIN public."FOIRequests" r on mr.foirequest_id = r.foirequestid and mr.foirequestversion_id = r.version + INNER JOIN public."ProgramAreas" pa on mr.programareaid = pa.programareaid + INNER JOIN public."ApplicantCategories" ac on r.applicantcategoryid = ac.applicantcategoryid + LEFT JOIN ( + SELECT ministryrequestid, MAX(version) as max_version + FROM public."FOIRequestCFRFees" + GROUP BY ministryrequestid + ) latest_payment on mr.foiministryrequestid = latest_payment.ministryrequestid + LEFT JOIN public."FOIRequestCFRFees" fee on mr.foiministryrequestid = fee.ministryrequestid + and latest_payment.max_version = fee.version and mr.version = fee.ministryrequestversion + INNER JOIN public."FOIOpenInformationRequests" oi on mr.foiministryrequestid = oi.foiministryrequest_id and oi.isactive = TRUE + INNER JOIN public."OpenInformationStatuses" oistatus on 
mr.oistatus_id = oistatus.oistatusid + INNER JOIN public."OpenInfoPublicationStatuses" oirequesttype on oi.oipublicationstatus_id = oirequesttype.oipublicationstatusid + LEFT JOIN public."FOIOpenInfoAdditionalFiles" oifiles on mr.foiministryrequestid = oifiles.ministryrequestid + WHERE (oistatus.name = '%s' or oistatus.name = '%s') and oirequesttype.name = '%s' and oi.publicationdate < '%s' and oi.processingstatus is NULL and mr.isactive = TRUE + `, openstate_ready, openstate_published, oirequesttype_publish, tomorrow.Format(dateformat)) + + rows, err := db.Query(qry) + if err != nil { + return records, fmt.Errorf("query failed: %w", err) + } + defer rows.Close() + + oiRecordsMap := make(map[int]*OpenInfoRecord) + for rows.Next() { + var openinfoid, foiministryrequestid, additionalfileid sql.NullInt64 + var axisrequestid, description, published_date, contributor, applicant_type, bcgovcode, sitemap_pages, queuetype, filename, s3uripath sql.NullString + var fees sql.NullFloat64 + + // err := rows.Scan( + // &record.Openinfoid, + // &record.Foiministryrequestid, + // &record.Axisrequestid, + // &record.Description, + // &record.Published_date, + // &record.Contributor, + // &record.Applicant_type, + // &record.Fees, + // &record.BCgovcode, + // &record.Sitemap_pages, + // &record.Type, + // ) + err := rows.Scan( + &openinfoid, + &foiministryrequestid, + &axisrequestid, + &description, + &published_date, + &contributor, + &applicant_type, + &fees, + &bcgovcode, + &sitemap_pages, + &queuetype, + &additionalfileid, + &filename, + &s3uripath, + ) + if err != nil { + return records, fmt.Errorf("failed to retrieve query result for prepublish: %w", err) + } + + if openinfoid.Valid && foiministryrequestid.Valid && axisrequestid.Valid && description.Valid && published_date.Valid && contributor.Valid && applicant_type.Valid && fees.Valid && bcgovcode.Valid && sitemap_pages.Valid && queuetype.Valid { + if _, ok := oiRecordsMap[int(foiministryrequestid.Int64)]; !ok { + oiRecordsMap[int(foiministryrequestid.Int64)] = &OpenInfoRecord{ + Openinfoid: int(openinfoid.Int64), + Foiministryrequestid: int(foiministryrequestid.Int64), + Axisrequestid: axisrequestid.String, + Description: description.String, + Published_date: published_date.String, + Contributor: contributor.String, + Applicant_type: applicant_type.String, + Fees: fees.Float64, + BCgovcode: bcgovcode.String, + Sitemap_pages: sitemap_pages.String, + Type: queuetype.String, + } + } + + if additionalfileid.Valid && filename.Valid && s3uripath.Valid { + oiRecordsMap[int(foiministryrequestid.Int64)].Additionalfiles = append(oiRecordsMap[int(foiministryrequestid.Int64)].Additionalfiles, AdditionalFile{ + Additionalfileid: int(additionalfileid.Int64), + Filename: filename.String, + S3uripath: s3uripath.String, + }) + } + } + + // records = append(records, record) + // fmt.Printf("ID: %s, Description: %s, Published Date: %s, Contributor: %s, Applicant Type: %s, Fees: %v\n", record.Axisrequestid, record.Description, record.Published_date, record.Contributor, record.Applicant_type, record.Fees) + } + + err = rows.Err() + if err != nil { + return records, fmt.Errorf("failed to retrieve query result: %w", err) + } + + for _, record := range oiRecordsMap { + records = append(records, *record) + } + + return records, nil +} + +func GetOIRecordsForPublishing(db *sql.DB) ([]OpenInfoRecord, error) { + var records []OpenInfoRecord + var record OpenInfoRecord + + qry := fmt.Sprintf(` + SELECT + oi.foiopeninforequestid, + mr.foiministryrequestid, + mr.axisrequestid, + 
mr.description, + oi.publicationdate, + pa.name as contributor, + ac.name as applicant_type, + COALESCE((fee.feedata->>'amountpaid')::Numeric, 0) as fees, + LOWER(pa.bcgovcode) as bcgovcode + FROM public."FOIMinistryRequests" mr + INNER JOIN public."FOIRequests" r on mr.foirequest_id = r.foirequestid and mr.foirequestversion_id = r.version + INNER JOIN public."ProgramAreas" pa on mr.programareaid = pa.programareaid + INNER JOIN public."ApplicantCategories" ac on r.applicantcategoryid = ac.applicantcategoryid + LEFT JOIN ( + SELECT ministryrequestid, MAX(version) as max_version + FROM public."FOIRequestCFRFees" + GROUP BY ministryrequestid + ) latest_payment on mr.foiministryrequestid = latest_payment.ministryrequestid + LEFT JOIN public."FOIRequestCFRFees" fee on mr.foiministryrequestid = fee.ministryrequestid + and latest_payment.max_version = fee.version and mr.version = fee.ministryrequestversion + INNER JOIN public."FOIOpenInformationRequests" oi on mr.foiministryrequestid = oi.foiministryrequest_id and oi.isactive = TRUE + WHERE oi.processingstatus = '%s' and mr.isactive = TRUE + `, openstatus_ready) + + rows, err := db.Query(qry) + if err != nil { + return records, fmt.Errorf("query failed: %w", err) + } + defer rows.Close() + + for rows.Next() { + err := rows.Scan( + &record.Openinfoid, + &record.Foiministryrequestid, + &record.Axisrequestid, + &record.Description, + &record.Published_date, + &record.Contributor, + &record.Applicant_type, + &record.Fees, + &record.BCgovcode) + if err != nil { + return records, fmt.Errorf("failed to retrieve query result: %w", err) + } + records = append(records, record) + fmt.Printf("ID: %s, Description: %s, Published Date: %s, Contributor: %s, Applicant Type: %s, Fees: %v\n", record.Axisrequestid, record.Description, record.Published_date, record.Contributor, record.Applicant_type, record.Fees) + } + + err = rows.Err() + if err != nil { + return records, fmt.Errorf("failed to retrieve query result: %w", err) + } + + return records, nil +} + +func GetOIRecordsForUnpublishing(db *sql.DB) ([]OpenInfoRecord, error) { + var records []OpenInfoRecord + var record OpenInfoRecord + + qry := fmt.Sprintf(` + SELECT + oi.foiopeninforequestid, + mr.axisrequestid, + COALESCE(oi.sitemap_pages, '') as sitemap_pages, + 'unpublish' as type + FROM public."FOIOpenInformationRequests" oi + INNER JOIN public."FOIMinistryRequests" mr on oi.foiministryrequest_id = mr.foiministryrequestid and mr.isactive = TRUE + INNER JOIN public."OpenInformationStatuses" oistatus on mr.oistatusid = oistatus.oistatusid + INNER JOIN public."OpenInfoPublicationStatuses" oirequesttype on oi.oipublicationstatus_id = oirequesttype.oipublicationstatusid + WHERE oirequesttype.name = '%s' and oi.processingstatus != '%s' and oi.isactive = TRUE + `, oirequesttype_unpublish, openstatus_unpublish) + + rows, err := db.Query(qry) + if err != nil { + return records, fmt.Errorf("query failed: %w", err) + } + defer rows.Close() + + for rows.Next() { + err := rows.Scan( + &record.Openinfoid, + &record.Axisrequestid, + &record.Sitemap_pages, + &record.Type, + ) + if err != nil { + return records, fmt.Errorf("failed to retrieve query result: %w", err) + } + records = append(records, record) + fmt.Printf("ID: %s, Description: %s, Published Date: %s, Contributor: %s, Applicant Type: %s, Fees: %v\n", record.Axisrequestid, record.Description, record.Published_date, record.Contributor, record.Applicant_type, record.Fees) + } + + err = rows.Err() + if err != nil { + return records, fmt.Errorf("failed to retrieve 
query result: %w", err) + } + + return records, nil +} + +func UpdateOIRecordState(db *sql.DB, foiministryrequestid int, publishingstatus string, message string, sitemap_pages string) error { + + // Begin a transaction + tx, err := db.Begin() + if err != nil { + log.Fatalf("Error beginning transaction: %v", err) + } + + // // Retrieve oipublicationstatus_id based on oistatus + // var oistatusid int + // err = tx.QueryRow(`SELECT oistatusid FROM public."OpenInformationStatuses" WHERE name = $1`, state).Scan(&oistatusid) + // if err != nil { + // tx.Rollback() + // log.Fatalf("Error retrieving oistatusid: %v", err) + // } + + // Step 1: Set previous versions' isactive to false + _, err = tx.Exec(`UPDATE public."FOIOpenInformationRequests" SET isactive = false, updated_at = $2, updatedby = 'publishingservice' WHERE foiministryrequest_id = $1 AND isactive = true`, foiministryrequestid, time.Now()) + if err != nil { + tx.Rollback() + log.Fatalf("Error updating previous versions: %v", err) + } + + // Step 2: Insert a new version of the record + _, err = tx.Exec(` + INSERT INTO public."FOIOpenInformationRequests" (version, foiministryrequest_id, foiministryrequestversion_id, oipublicationstatus_id, oiexemption_id, oiassignedto, oiexemptionapproved, pagereference, iaorationale, oifeedback, publicationdate, isactive, copyrightsevered, created_at, updated_at, createdby, updatedby, processingstatus, processingmessage, sitemap_pages) + SELECT version + 1, foiministryrequest_id, foiministryrequestversion_id, oipublicationstatus_id, oiexemption_id, oiassignedto, oiexemptionapproved, pagereference, iaorationale, oifeedback, publicationdate, true, copyrightsevered, $2, NULL, 'publishingservice', NULL, $3, $4, $5 + FROM public."FOIOpenInformationRequests" + WHERE foiministryrequest_id = $1 AND isactive = false + ORDER BY version DESC + LIMIT 1 + `, foiministryrequestid, time.Now(), publishingstatus, message, sitemap_pages) + if err != nil { + tx.Rollback() + log.Fatalf("Error inserting new version for sitemaps: %v", err) + } + + // Commit the transaction + err = tx.Commit() + if err != nil { + log.Fatalf("Error committing transaction: %v", err) + } + + return err +} + +func LogError(db *sql.DB, foiministryrequestid int, publishingstatus string, message string) error { + + // Begin a transaction + tx, err := db.Begin() + if err != nil { + log.Fatalf("Error beginning transaction: %v", err) + } + + // Step 1: Set previous versions' isactive to false + _, err = tx.Exec(`UPDATE public."FOIOpenInformationRequests" SET isactive = false, updated_at = $2, updatedby = 'publishingservice' WHERE foiministryrequest_id = $1 AND isactive = true`, foiministryrequestid, time.Now()) + if err != nil { + tx.Rollback() + log.Fatalf("Error updating previous versions: %v", err) + } + + // Step 2: Insert a new version of the record + _, err = tx.Exec(` + INSERT INTO public."FOIOpenInformationRequests" (version, foiministryrequest_id, foiministryrequestversion_id, oipublicationstatus_id, oiexemption_id, oiassignedto, oiexemptionapproved, pagereference, iaorationale, oifeedback, publicationdate, isactive, copyrightsevered, created_at, updated_at, createdby, updatedby, processingstatus, processingmessage, sitemap_pages) + SELECT version + 1, foiministryrequest_id, foiministryrequestversion_id, oipublicationstatus_id, oiexemption_id, oiassignedto, oiexemptionapproved, pagereference, iaorationale, oifeedback, publicationdate, true, copyrightsevered, $2, NULL, 'publishingservice', NULL, $3, $4, sitemap_pages + FROM 
public."FOIOpenInformationRequests" + WHERE foiministryrequest_id = $1 AND isactive = false + ORDER BY version DESC + LIMIT 1 + `, foiministryrequestid, time.Now(), publishingstatus, message) + if err != nil { + tx.Rollback() + log.Fatalf("Error inserting error message: %v", err) + } + + // Commit the transaction + err = tx.Commit() + if err != nil { + log.Fatalf("Error committing transaction: %v", err) + } + + return err +} diff --git a/computingservices/OpenInfoServices/lib/files/html.go b/computingservices/OpenInfoServices/lib/files/html.go new file mode 100644 index 000000000..7c0a1d06f --- /dev/null +++ b/computingservices/OpenInfoServices/lib/files/html.go @@ -0,0 +1,43 @@ +package files + +import ( + "bytes" + "html/template" +) + +type MetaTag struct { + Name string + Content string +} + +type Link struct { + FileName string + URL string +} + +type TemplateVariables struct { + Title string + MetaTags []MetaTag + Links []Link + Content string +} + +func CreateHTML(variables TemplateVariables) bytes.Buffer { + // Parse the HTML template file + t, err := template.ParseFiles("./templates/template.html") + if err != nil { + panic(err) + } + + // Create a buffer to hold the rendered template + var buf bytes.Buffer + + // Execute the template and write the output to the file + err = t.Execute(&buf, variables) + if err != nil { + panic(err) + } + + // fmt.Println(buf.String()) + return buf +} diff --git a/computingservices/OpenInfoServices/lib/queue/redis.go b/computingservices/OpenInfoServices/lib/queue/redis.go new file mode 100644 index 000000000..c38eedf72 --- /dev/null +++ b/computingservices/OpenInfoServices/lib/queue/redis.go @@ -0,0 +1,36 @@ +package redislib + +import ( + myconfig "OpenInfoServices/config" + "context" + "fmt" + "log" + + "github.com/redis/go-redis/v9" +) + +// Initialize a Redis client +func CreateRedisClient() *redis.Client { + queuehost, queueport, queuepassword := myconfig.GetRedis() + + rdb := redis.NewClient(&redis.Options{ + Addr: queuehost + ":" + queueport, + Password: queuepassword, + DB: 0, // Use default DB + }) + return rdb +} + +// Write a message to the Redis queue +func WriteMessage(rdb *redis.Client, queueName string, message string) { + err := rdb.LPush(context.Background(), queueName, message).Err() + if err != nil { + log.Fatalf("could not write message to queue: %v", err) + } + fmt.Printf("Message written to queue: %s\n", message) +} + +// Read a message from the Redis queue +func ReadMessage(rdb *redis.Client, queueName string) (string, error) { + return rdb.RPop(context.Background(), queueName).Result() +} diff --git a/computingservices/OpenInfoServices/main.go b/computingservices/OpenInfoServices/main.go new file mode 100644 index 000000000..c84d2d20c --- /dev/null +++ b/computingservices/OpenInfoServices/main.go @@ -0,0 +1,344 @@ +package main + +import ( + // "encoding/xml" + myconfig "OpenInfoServices/config" + "OpenInfoServices/lib/awslib" + dbservice "OpenInfoServices/lib/db" + redislib "OpenInfoServices/lib/queue" + oiservices "OpenInfoServices/services" + "encoding/json" + "errors" + "fmt" + "log" + "os" + "path/filepath" + "strconv" + "time" + + "github.com/redis/go-redis/v9" +) + +const ( + dateformat = "2006-01-02" + openstatus_sitemap = "ready for crawling" + openstatus_sitemap_message = "sitemap ready" +) + +var ( + //DB + host string + port string + user string + password string + dbname string + + //Redis + queue string + + //S3 + s3url string + oibucket string + oiprefix string + sitemapprefix string + sitemaplimit int + + 
	//Others
+	env string
+)
+
+func main() {
+
+	// Only enable when running locally to load .env
+	// setEnvForLocal(".env")
+
+	if len(os.Args) < 2 {
+		fmt.Println("Please provide a parameter: dequeue, enqueueforpublish, enqueueforunpublish or sitemap")
+		return
+	}
+
+	host, port, user, password, dbname = myconfig.GetDB()
+	s3url, oibucket, oiprefix, sitemapprefix, sitemaplimit = myconfig.GetS3Path()
+	env, queue = myconfig.GetOthers()
+
+	dsn := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=disable",
+		host, port, user, password, dbname)
+
+	param := os.Args[1]
+
+	switch param {
+	case "dequeue":
+
+		// Connect DB
+		db, err1 := dbservice.Conn(dsn)
+		if err1 != nil {
+			log.Fatalf("%v", err1)
+			return
+		}
+		defer db.Close()
+
+		// Create a Redis client
+		rdb := redislib.CreateRedisClient()
+		// Define the queue name
+		queueName := queue
+
+		// Subscribe to the queue and read messages
+		for {
+			message, err := redislib.ReadMessage(rdb, queueName)
+			if err != nil {
+				if err == redis.Nil {
+					fmt.Println("No messages in queue")
+				} else {
+					// log.Fatalf("could not read message from queue: %v", err)
+					log.Printf("could not read message from queue: %v", err)
+				}
+				time.Sleep(1 * time.Second)
+				continue
+			}
+
+			fmt.Printf("Message read from queue: %s\n", message)
+
+			var msg oiservices.OpenInfoMessage
+			err = json.Unmarshal([]byte(message), &msg)
+			if err != nil {
+				log.Printf("could not parse json string: %v", err)
+				continue
+			}
+
+			fmt.Printf("openinfoid: %d\n", msg.Openinfoid)
+			fmt.Printf("foiministryrequestid: %d\n", msg.Foiministryrequestid)
+			fmt.Printf("published_date: %s\n", msg.Published_date)
+			fmt.Printf("ID: %s, Description: %s, Published Date: %s, Contributor: %s, Applicant Type: %s, Fees: %v, Files: %v\n", msg.Axisrequestid, msg.Description, msg.Published_date, msg.Contributor, msg.Applicant_type, msg.Fees, msg.AdditionalFiles)
+
+			if msg.Type == "publish" {
+				oiservices.Publish(msg, db)
+			} else if msg.Type == "unpublish" {
+				oiservices.Unpublish(msg, db)
+			} else {
+				fmt.Println("Unknown message type")
+			}
+		}
+
+	case "enqueueforpublish":
+
+		// Connect DB
+		db, err1 := dbservice.Conn(dsn)
+		if err1 != nil {
+			log.Fatalf("%v", err1)
+			return
+		}
+		defer db.Close()
+
+		// Get the open info records that are ready for publishing
+		records, err := dbservice.GetOIRecordsForPrePublishing(db)
+		if err != nil {
+			log.Fatalf("%v", err)
+			return
+		}
+
+		// Create a Redis client
+		rdb := redislib.CreateRedisClient()
+
+		// Define the queue name
+		queueName := queue
+
+		for _, item := range records {
+			fmt.Printf("ID: %s, Description: %s, Published Date: %s, Contributor: %s, Applicant Type: %s, Fees: %v, Files: %v\n", item.Axisrequestid, item.Description, item.Published_date, item.Contributor, item.Applicant_type, item.Fees, item.Additionalfiles)
+
+			jsonData, err := json.Marshal(item)
+			if err != nil {
+				panic(err)
+			}
+
+			// Write a message to the queue
+			redislib.WriteMessage(rdb, queueName, string(jsonData))
+		}
+
+	case "enqueueforunpublish":
+		// Connect DB
+		db, err1 := dbservice.Conn(dsn)
+		if err1 != nil {
+			log.Fatalf("%v", err1)
+			return
+		}
+		defer db.Close()
+
+		// Get the open info records that are ready for unpublishing
+		records, err := dbservice.GetOIRecordsForUnpublishing(db)
+		if err != nil {
+			log.Fatalf("%v", err)
+			return
+		}
+
+		// Create a Redis client
+		rdb := redislib.CreateRedisClient()
+
+		// Define the queue name
+		queueName := queue
+
+		for _, item := range records {
+			fmt.Printf("ID: %s, Sitemap_Pages: %s, Type: %s\n", item.Axisrequestid,
item.Sitemap_pages, item.Type) + + jsonData, err := json.Marshal(item) + if err != nil { + panic(err) + } + + // Write a message to the queue + redislib.WriteMessage(rdb, queueName, string(jsonData)) + } + + case "sitemap": + + // Connect DB + db, err1 := dbservice.Conn(dsn) + if err1 != nil { + log.Fatalf("%v", err1) + return + } + defer db.Close() + + // Get the open info record, which are ready for XML + records, err := dbservice.GetOIRecordsForPublishing(db) + if err != nil { + log.Fatalf("%v", err) + return + } + + // Get the last sitemap_page from s3 + destBucket := env + "-" + oibucket + destPrefix := sitemapprefix + + sitemapindex := awslib.ReadSiteMapIndexS3(destBucket, destPrefix, "sitemap_index.xml") + urlset := awslib.ReadSiteMapPageS3(destBucket, destPrefix, "sitemap_pages_"+strconv.Itoa(len(sitemapindex.Sitemaps))+".xml") + + initialSitemapsCount := len(sitemapindex.Sitemaps) + + // Get the current time + now := time.Now() + lastMod := now.Format(dateformat) + + // Insert to XML + for i, item := range records { + fmt.Printf("ID: %s, Description: %s, Published Date: %s, Contributor: %s, Applicant Type: %s, Fees: %v\n", item.Axisrequestid, item.Description, item.Published_date, item.Contributor, item.Applicant_type, item.Fees) + + // Save sitemap_pages_.xml which reached 5000 limit + if len(urlset.Urls) >= sitemaplimit { + // Save sitemap_pages_.xml + err = awslib.SaveSiteMapPageS3(destBucket, destPrefix, "sitemap_pages_"+strconv.Itoa(len(sitemapindex.Sitemaps))+".xml", urlset) + if err != nil { + log.Fatalf("failed to save sitemap_pages_"+strconv.Itoa(len(sitemapindex.Sitemaps))+".xml: %v", err) + return + } + + // Update sitemap index + sitemap := awslib.SiteMap{ + Loc: s3url + destBucket + "/" + destPrefix + "sitemap_pages_" + strconv.Itoa(len(sitemapindex.Sitemaps)+1) + ".xml", + LastMod: lastMod, + } + sitemapindex.Sitemaps = append(sitemapindex.Sitemaps, sitemap) + + // Clear urlset for entries already saved into sitemap_pages + urlset.Urls = []awslib.Url{} + } + + url := awslib.Url{ + Loc: s3url + destBucket + "/" + oiprefix + item.Axisrequestid + "/" + item.Axisrequestid + ".html", + LastMod: lastMod, + } + urlset.Urls = append(urlset.Urls, url) + + // Save sitemap_pages file name + records[i].Sitemap_pages = "sitemap_pages_" + strconv.Itoa(len(sitemapindex.Sitemaps)) + ".xml" + } + + // Save new entries to sitemap_pages_.xml + err = awslib.SaveSiteMapPageS3(destBucket, destPrefix, "sitemap_pages_"+strconv.Itoa(len(sitemapindex.Sitemaps))+".xml", urlset) + if err != nil { + log.Fatalf("failed to save sitemap_pages_"+strconv.Itoa(len(sitemapindex.Sitemaps))+".xml: %v", err) + return + } + + // Update sitemap_index.xml if there are new sitemap_pages_.xml created + if len(sitemapindex.Sitemaps) > initialSitemapsCount { + err = awslib.SaveSiteMapIndexS3(destBucket, destPrefix, "sitemap_index.xml", sitemapindex) + if err != nil { + log.Fatalf("failed to save sitemap_index.xml: %v", err) + return + } + } + + // Update openinfo table status & sitemap_pages file name to DB + for _, item := range records { + err = dbservice.UpdateOIRecordState(db, item.Foiministryrequestid, openstatus_sitemap, openstatus_sitemap_message, item.Sitemap_pages) + if err != nil { + log.Fatalf("%v", err) + return + } + } + + case "test": + //----- put testing script here for manual test ----- + + // test unpublish + // Connect DB + db, err1 := dbservice.Conn(dsn) + if err1 != nil { + log.Fatalf("%v", err1) + return + } + defer db.Close() + + // Create a Redis client + rdb := 
redislib.CreateRedisClient() + // Define the queue name + queueName := queue + + // Subscribe to the queue and read messages + message, err := redislib.ReadMessage(rdb, queueName) + if err != nil { + log.Fatalf("%v", err) + return + } + + fmt.Printf("Message read from queue: %s\n", message) + + var msg oiservices.OpenInfoMessage + err = json.Unmarshal([]byte(message), &msg) + if err != nil { + log.Fatalf("could not parse json string: %v", err) + return + } + + fmt.Printf("openinfoid: %d\n", msg.Openinfoid) + fmt.Printf("foiministryrequestid: %d\n", msg.Foiministryrequestid) + fmt.Printf("published_date: %s\n", msg.Published_date) + fmt.Printf("ID: %s, Description: %s, Published Date: %s, Contributor: %s, Applicant Type: %s, Fees: %v\n", msg.Axisrequestid, msg.Description, msg.Published_date, msg.Contributor, msg.Applicant_type, msg.Fees) + + oiservices.Unpublish(msg, db) + + //----- test script end ----- + + default: + fmt.Println("Unknown parameter. Please use 'dequeue', 'enqueueforpublish', 'sitemap' or 'enqueueforunpublish'") + } +} + +func JoinStr(a string, b string) (string, error) { + if a == "" || b == "" { + return "", errors.New("empty string") + } + return a + b, nil +} + +func setEnvForLocal(path string) { + absolutePath, err := filepath.Abs(path) + if err != nil { + log.Fatalf("failed to get absolute path: %v", err) + } + + err = os.Setenv("ENVFILE_PATH", absolutePath) + if err != nil { + log.Fatalf("failed to set environment variable: %v", err) + } +} diff --git a/computingservices/OpenInfoServices/main_test.go b/computingservices/OpenInfoServices/main_test.go new file mode 100644 index 000000000..17987dc93 --- /dev/null +++ b/computingservices/OpenInfoServices/main_test.go @@ -0,0 +1,12 @@ +package main + +import ( + "testing" +) + +func TestPrint(t *testing.T) { + result, err := JoinStr("ab", "c") + if result != "abc" || (err != nil) { + t.Errorf("Error") + } +} diff --git a/computingservices/OpenInfoServices/services/messagehandler.go b/computingservices/OpenInfoServices/services/messagehandler.go new file mode 100644 index 000000000..5a25427e7 --- /dev/null +++ b/computingservices/OpenInfoServices/services/messagehandler.go @@ -0,0 +1,173 @@ +package oiservices + +import ( + // "encoding/xml" + myconfig "OpenInfoServices/config" + "OpenInfoServices/lib/awslib" + dbservice "OpenInfoServices/lib/db" + "OpenInfoServices/lib/files" + "database/sql" + "fmt" + "log" + "strings" + "time" +) + +const ( + dateformat = "2006-01-02" + dateformat_month = "01" + dateformat_year = "2006" + openstatus_ready = "ready for sitemap" + openstatus_ready_message = "html ready" + openstatus_unpublish = "unpublished" + openstatus_unpublish_message = "entry removed from sitemap" +) + +type OpenInfoMessage struct { + Openinfoid int `json:"openinfoid"` + Foiministryrequestid int `json:"foiministryrequestid"` + Axisrequestid string `json:"axisrequestid"` + Description string `json:"description"` + Published_date string `json:"published_date"` + Contributor string `json:"contributor"` + Applicant_type string `json:"applicant_type"` + Fees float32 `json:"fees"` + BCgovcode string `json:"bcgovcode"` + Type string `json:"type"` + Sitemap_pages string `json:"sitemap_pages"` + AdditionalFiles []awslib.AdditionalFile `json:"additionalfiles"` +} + +var ( + //S3 + s3url string + oibucket string + oiprefix string + sitemapprefix string + + env string +) + +func Publish(msg OpenInfoMessage, db *sql.DB) { + + s3url, oibucket, oiprefix, sitemapprefix, _ = myconfig.GetS3Path() + env, _ = myconfig.GetOthers() + + 
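+	// Note: the ":=" on the next line declares a function-local oibucket that
+	// shadows the package-level oibucket populated by GetS3Path(); it holds the
+	// environment-prefixed destination bucket name (env + "-" + bucket) used below.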
oibucket := env + "-" + oibucket + + // Get file info from s3 bucket folder + var result awslib.ScanResult + result, err := awslib.ScanS3(msg.BCgovcode+"-"+env+"-e", msg.Axisrequestid+"/openinfo/", s3url+oibucket+"/"+oiprefix+msg.Axisrequestid+"/", msg.AdditionalFiles) + if err != nil { + log.Fatalf("%v", err) + return + } + fmt.Printf("Combined letter names: %s, letter size: %s mb\n", result.LetterNames, result.LetterSizes) + fmt.Printf("Combined file names: %s, file size: %s mb\n", result.FileNames, result.FileSizes) + + // Get the current time + now := time.Now() + // Get the Unix timestamp + unixTimestamp := now.Unix() + + // Define the data to be passed to the template + variables := files.TemplateVariables{ + Title: msg.Axisrequestid, + MetaTags: []files.MetaTag{ + {Name: "dc.title", Content: "FOI Request - " + msg.Axisrequestid}, + {Name: "dc.description", Content: msg.Description}, + {Name: "high_level_subject", Content: "FOI Request"}, + {Name: "dc.subject", Content: "FOI Request"}, + {Name: "dc.published_date", Content: msg.Published_date}, + {Name: "timestamp", Content: fmt.Sprintf("%d", unixTimestamp)}, + {Name: "dc.contributor", Content: msg.Contributor}, + {Name: "recorduid", Content: msg.Axisrequestid}, + {Name: "recordurl", Content: s3url + oibucket + "/" + oiprefix + msg.Axisrequestid + "/" + msg.Axisrequestid + ".html"}, + {Name: "month", Content: now.Format(dateformat_month)}, + {Name: "year", Content: now.Format(dateformat_year)}, + {Name: "letter", Content: result.LetterNames}, + {Name: "letter_file_sizes", Content: result.LetterSizes}, + {Name: "notes", Content: ""}, + {Name: "notes_file_sizes", Content: ""}, + {Name: "files", Content: result.FileNames}, + {Name: "file_sizes", Content: result.FileSizes}, + {Name: "applicant_type", Content: msg.Applicant_type}, + {Name: "fees", Content: fmt.Sprintf("$%.2f", msg.Fees)}, + {Name: "position_title", Content: " "}, + {Name: "individual_name", Content: ""}, + }, + Links: result.Links, + Content: "FOI Request - " + msg.Axisrequestid + " " + msg.Description, + } + + buf := files.CreateHTML(variables) + err = awslib.SaveFileS3(msg.BCgovcode+"-"+env+"-e", msg.Axisrequestid+"/openinfo/", msg.Axisrequestid+".html", buf.Bytes()) + if err != nil { + log.Fatalf("%v", err) + return + } + + // Copy files to open info bucket + awslib.CopyS3(msg.BCgovcode+"-"+env+"-e", msg.Axisrequestid+"/openinfo/", msg.AdditionalFiles) + + // Update open info status in DB + err = dbservice.UpdateOIRecordStatus(db, msg.Foiministryrequestid, openstatus_ready, openstatus_ready_message) + if err != nil { + log.Fatalf("%v", err) + return + } +} + +func Unpublish(msg OpenInfoMessage, db *sql.DB) { + // Remove folder from s3 + + s3url, oibucket, oiprefix, sitemapprefix, _ = myconfig.GetS3Path() + env, _ = myconfig.GetOthers() + + destBucket := env + "-" + oibucket + destPrefix := oiprefix + err := awslib.RemoveFolderFromS3(destBucket, destPrefix+msg.Axisrequestid+"/") // Add a trailing slash to delete the folder + if err != nil { + log.Fatalf("%v", err) + return + } + + if msg.Sitemap_pages != "" { + // Remove entry from sitemap_pages_.xml + + // 1. get the last sitemap_page from s3 + prefix := sitemapprefix + urlset := awslib.ReadSiteMapPageS3(destBucket, prefix, msg.Sitemap_pages) + + // 2. find the index of the target entry + index := -1 + for i, item := range urlset.Urls { + if strings.Contains(item.Loc, msg.Axisrequestid) { + index = i + break + } + } + + // 3. 
remove entry from the array + if index != -1 { + urlset.Urls = append(urlset.Urls[:index], urlset.Urls[index+1:]...) + fmt.Println("Entry removed:", msg.Axisrequestid) + } else { + fmt.Println("Entry not found", msg.Axisrequestid) + } + + // 4. save sitemap_pages_.xml + err = awslib.SaveSiteMapPageS3(destBucket, prefix, msg.Sitemap_pages, urlset) + if err != nil { + log.Fatalf("failed to save "+msg.Sitemap_pages+": %v", err) + return + } + } + + // Update unpublish status to DB + err = dbservice.UpdateOIRecordStatus(db, msg.Foiministryrequestid, openstatus_unpublish, openstatus_unpublish_message) + if err != nil { + log.Fatalf("%v", err) + return + } +} diff --git a/computingservices/OpenInfoServices/templates/template.html b/computingservices/OpenInfoServices/templates/template.html new file mode 100644 index 000000000..4798c1159 --- /dev/null +++ b/computingservices/OpenInfoServices/templates/template.html @@ -0,0 +1,14 @@ + + +{{.Title}} +{{range .MetaTags}} + +{{end}} + + +{{range .Links}} +{{.FileName}}
+{{end}} +{{.Content}} + + \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 790944e6a..992f1dc4c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -317,6 +317,38 @@ services: - FOI_DB_PORT=${FOI_DB_PORT} - FOI_DB_USER=${FOI_DB_USER} - FOI_DB_PASSWORD=${FOI_DB_PASSWORD} + + foi-docreviewer-openinfoservice: + container_name: foi-docreviewer-openinfoservice + build: + context: ./computingservices/OpenInfoServices + dockerfile: Dockerfile.local + image: docrevieweropeninfoserviceimage + stdin_open: true + tty: true + networks: + services-network: + aliases: + - docrevieweropeninfoservice + environment: + - FOI_DB_USER=${FOI_DB_USER} + - FOI_DB_PASSWORD=${FOI_DB_PASSWORD} + - FOI_DB_NAME=${FOI_DB_NAME} + - FOI_DB_HOST=${FOI_DB_HOST} + - FOI_DB_PORT=${FOI_DB_PORT} + - OI_REDIS_HOST=${OI_REDIS_HOST} + - OI_REDIS_PORT=${OI_REDIS_PORT} + - OI_REDIS_PASSWORD=${OI_REDIS_PASSWORD} + - OI_QUEUE_NAME=${OI_QUEUE_NAME} + - OI_S3_ENV=${OI_S3_ENV} + - OI_S3_HOST=${OI_S3_HOST} + - OI_S3_REGION=${OI_S3_REGION} + - OI_S3_BUCKET=${OI_S3_BUCKET} + - OI_ACCESS_KEY=${OI_ACCESS_KEY} + - OI_SECRET_KEY=${OI_SECRET_KEY} + - OI_PREFIX=${OI_PREFIX} + - SITEMAP_PREFIX=${SITEMAP_PREFIX} + - SITEMAP_PAGES_LIMIT=${SITEMAP_PAGES_LIMIT} volumes: dbdata: networks: diff --git a/openshift/templates/openinfoservice/openinfoservice-build.yaml b/openshift/templates/openinfoservice/openinfoservice-build.yaml new file mode 100644 index 000000000..ab11ee893 --- /dev/null +++ b/openshift/templates/openinfoservice/openinfoservice-build.yaml @@ -0,0 +1,61 @@ +--- +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + name: "${APP_NAME}-build-template" + creationTimestamp: +objects: +- kind: ImageStream + apiVersion: v1 + metadata: + name: "${APP_NAME}" +- kind: BuildConfig + apiVersion: v1 + metadata: + name: "${APP_NAME}-build" + labels: + app: "${APP_NAME}-build" + spec: + runPolicy: Serial + source: + type: Git + git: + uri: "${GIT_REPO_URL}" + ref: "${GIT_REF}" + contextDir: "${SOURCE_CONTEXT_DIR}" + strategy: + type: Docker + dockerStrategy: + dockerfilePath: "${DOCKER_FILE_PATH}" + pullSecret: + name: artifacts-pull-default-jmhvkc + output: + to: + kind: ImageStreamTag + name: "${APP_NAME}:latest" +parameters: +- name: APP_NAME + displayName: Name + description: The name assigned to all of the resources defined in this template. + required: true + value: reviewer-openinfoservice +- name: GIT_REPO_URL + displayName: Git Repo URL + description: The URL to your GIT repo. + required: true + value: https://github.com/bcgov/foi-docreviewer +- name: GIT_REF + displayName: Git Reference + description: The git reference or branch. + required: true + value: main +- name: SOURCE_CONTEXT_DIR + displayName: Source Context Directory + description: The source context directory. + required: false + value: computingservices/openinfoservice +- name: DOCKER_FILE_PATH + displayName: Docker File Path + description: The path to the docker file defining the build. + required: false + value: "Dockerfile.local" \ No newline at end of file diff --git a/openshift/templates/openinfoservice/openinfoservice-deploy.yaml b/openshift/templates/openinfoservice/openinfoservice-deploy.yaml new file mode 100644 index 000000000..1987fae10 --- /dev/null +++ b/openshift/templates/openinfoservice/openinfoservice-deploy.yaml @@ -0,0 +1,196 @@ +--- +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + annotations: + description: Deployment template for a open information service. 
+ tags: "${APP_NAME}" + name: "${APP_NAME}-deploy" +objects: +- kind: DeploymentConfig + apiVersion: v1 + metadata: + name: "${APP_NAME}" + labels: + app: "${APP_NAME}" + app-group: "${APP_GROUP}" + template: "${APP_NAME}-deploy" + spec: + strategy: + type: Rolling + rollingParams: + updatePeriodSeconds: 1 + intervalSeconds: 1 + timeoutSeconds: 600 + maxUnavailable: 25% + maxSurge: 25% + triggers: + - type: ImageChange + imageChangeParams: + automatic: true + containerNames: + - "${APP_NAME}" + from: + kind: ImageStreamTag + namespace: "${IMAGE_NAMESPACE}" + name: "${IMAGE_NAME}:${TAG_NAME}" + - type: ConfigChange + replicas: 1 + test: false + selector: + app: "${APP_NAME}" + deploymentconfig: "${APP_NAME}" + template: + metadata: + labels: + app: "${APP_NAME}" + app-group: "${APP_GROUP}" + deploymentconfig: "${APP_NAME}" + template: "${APP_NAME}-deploy" + spec: + containers: + - name: "${APP_NAME}" + image: "${APP_NAME}" + imagePullPolicy: Always + env: + - name: FOI_DB_USER + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_USER + - name: FOI_DB_PASSWORD + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_PASSWORD + - name: FOI_DB_NAME + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_NAME + - name: FOI_DB_HOST + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_HOST + - name: FOI_DB_PORT + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: FOI_DB_PORT + - name: OI_REDIS_HOST + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_REDIS_HOST + - name: OI_REDIS_PORT + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_REDIS_PORT + - name: OI_REDIS_PASSWORD + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_REDIS_PASSWORD + - name: OI_QUEUE_NAME + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_QUEUE_NAME + - name: OI_S3_ENV + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_S3_ENV + - name: OI_S3_HOST + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_S3_HOST + - name: OI_S3_REGION + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_S3_REGION + - name: OI_S3_BUCKET + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_S3_BUCKET + - name: OI_ACCESS_KEY + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_ACCESS_KEY + - name: OI_SECRET_KEY + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_SECRET_KEY + - name: OI_PREFIX + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: OI_PREFIX + - name: SITEMAP_PREFIX + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: SITEMAP_PREFIX + - name: SITEMAP_PAGES_LIMIT + valueFrom: + secretKeyRef: + name: "${SECRETS}" + key: SITEMAP_PAGES_LIMIT + resources: + requests: + cpu: "50m" + memory: "250Mi" + limits: + cpu: "150m" + memory: "500Mi" + terminationMessagePath: "/dev/termination-log" + terminationMessagePolicy: File + imagePullPolicy: Always + restartPolicy: Always + terminationGracePeriodSeconds: 30 + dnsPolicy: ClusterFirst + securityContext: {} + schedulerName: default-scheduler + # status: + # loadBalancer: {} +parameters: +- name: APP_NAME + displayName: Name + description: The name assigned to all of the OpenShift resources associated to the + server instance. + required: true + value: reviewer-openinfoservice +- name: APP_GROUP + displayName: App Group + description: The name assigned to all of the deployments in this project. 
+ required: true + value: foi-docreviewer +- name: IMAGE_NAMESPACE + displayName: Image Namespace + required: true + description: The namespace of the OpenShift project containing the imagestream for + the application. + value: d106d6-tools +- name: IMAGE_NAME + displayName: Name + description: The name assigned to all of the OpenShift resources associated to the + server instance. + required: true + value: reviewer-openinfoservice +- name: TAG_NAME + displayName: Environment TAG name + description: The TAG name for this environment, e.g., dev, test, prod + required: true + value: dev +- name: SECRETS + displayName: OpenInfo Secrets + description: Name of secrets for all openinfoservice values + required: true + value: openinfoservice-secret diff --git a/openshift/templates/openinfoservice/openinfoservice-enqueueforpublish-build.yaml b/openshift/templates/openinfoservice/openinfoservice-enqueueforpublish-build.yaml new file mode 100644 index 000000000..075b0d9b4 --- /dev/null +++ b/openshift/templates/openinfoservice/openinfoservice-enqueueforpublish-build.yaml @@ -0,0 +1,61 @@ +--- +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + name: "${APP_NAME}-build-template" + creationTimestamp: +objects: +- kind: ImageStream + apiVersion: v1 + metadata: + name: "${APP_NAME}" +- kind: BuildConfig + apiVersion: v1 + metadata: + name: "${APP_NAME}-build" + labels: + app: "${APP_NAME}-build" + spec: + runPolicy: Serial + source: + type: Git + git: + uri: "${GIT_REPO_URL}" + ref: "${GIT_REF}" + contextDir: "${SOURCE_CONTEXT_DIR}" + strategy: + type: Docker + dockerStrategy: + dockerfilePath: "${DOCKER_FILE_PATH}" + pullSecret: + name: artifacts-pull-default-jmhvkc + output: + to: + kind: ImageStreamTag + name: "${APP_NAME}:latest" +parameters: +- name: APP_NAME + displayName: Name + description: The name assigned to all of the resources defined in this template. + required: true + value: reviewer-openinfoservice +- name: GIT_REPO_URL + displayName: Git Repo URL + description: The URL to your GIT repo. + required: true + value: https://github.com/bcgov/foi-docreviewer +- name: GIT_REF + displayName: Git Reference + description: The git reference or branch. + required: true + value: main +- name: SOURCE_CONTEXT_DIR + displayName: Source Context Directory + description: The source context directory. + required: false + value: computingservices/openinfoservice +- name: DOCKER_FILE_PATH + displayName: Docker File Path + description: The path to the docker file defining the build. 
+ required: false + value: "Dockerfile.enqueueforpublish.local" \ No newline at end of file diff --git a/openshift/templates/openinfoservice/openinfoservice-enqueueforunpublish-build.yaml b/openshift/templates/openinfoservice/openinfoservice-enqueueforunpublish-build.yaml new file mode 100644 index 000000000..988708bcb --- /dev/null +++ b/openshift/templates/openinfoservice/openinfoservice-enqueueforunpublish-build.yaml @@ -0,0 +1,61 @@ +--- +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + name: "${APP_NAME}-build-template" + creationTimestamp: +objects: +- kind: ImageStream + apiVersion: v1 + metadata: + name: "${APP_NAME}" +- kind: BuildConfig + apiVersion: v1 + metadata: + name: "${APP_NAME}-build" + labels: + app: "${APP_NAME}-build" + spec: + runPolicy: Serial + source: + type: Git + git: + uri: "${GIT_REPO_URL}" + ref: "${GIT_REF}" + contextDir: "${SOURCE_CONTEXT_DIR}" + strategy: + type: Docker + dockerStrategy: + dockerfilePath: "${DOCKER_FILE_PATH}" + pullSecret: + name: artifacts-pull-default-jmhvkc + output: + to: + kind: ImageStreamTag + name: "${APP_NAME}:latest" +parameters: +- name: APP_NAME + displayName: Name + description: The name assigned to all of the resources defined in this template. + required: true + value: reviewer-openinfoservice +- name: GIT_REPO_URL + displayName: Git Repo URL + description: The URL to your GIT repo. + required: true + value: https://github.com/bcgov/foi-docreviewer +- name: GIT_REF + displayName: Git Reference + description: The git reference or branch. + required: true + value: main +- name: SOURCE_CONTEXT_DIR + displayName: Source Context Directory + description: The source context directory. + required: false + value: computingservices/openinfoservice +- name: DOCKER_FILE_PATH + displayName: Docker File Path + description: The path to the docker file defining the build. + required: false + value: "Dockerfile.enqueueforunpublish.local" \ No newline at end of file diff --git a/openshift/templates/openinfoservice/openinfoservice-sitemap-build.yaml b/openshift/templates/openinfoservice/openinfoservice-sitemap-build.yaml new file mode 100644 index 000000000..b3a3451ac --- /dev/null +++ b/openshift/templates/openinfoservice/openinfoservice-sitemap-build.yaml @@ -0,0 +1,61 @@ +--- +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + name: "${APP_NAME}-build-template" + creationTimestamp: +objects: +- kind: ImageStream + apiVersion: v1 + metadata: + name: "${APP_NAME}" +- kind: BuildConfig + apiVersion: v1 + metadata: + name: "${APP_NAME}-build" + labels: + app: "${APP_NAME}-build" + spec: + runPolicy: Serial + source: + type: Git + git: + uri: "${GIT_REPO_URL}" + ref: "${GIT_REF}" + contextDir: "${SOURCE_CONTEXT_DIR}" + strategy: + type: Docker + dockerStrategy: + dockerfilePath: "${DOCKER_FILE_PATH}" + pullSecret: + name: artifacts-pull-default-jmhvkc + output: + to: + kind: ImageStreamTag + name: "${APP_NAME}:latest" +parameters: +- name: APP_NAME + displayName: Name + description: The name assigned to all of the resources defined in this template. + required: true + value: reviewer-openinfoservice +- name: GIT_REPO_URL + displayName: Git Repo URL + description: The URL to your GIT repo. + required: true + value: https://github.com/bcgov/foi-docreviewer +- name: GIT_REF + displayName: Git Reference + description: The git reference or branch. + required: true + value: main +- name: SOURCE_CONTEXT_DIR + displayName: Source Context Directory + description: The source context directory. 
+ required: false + value: computingservices/openinfoservice +- name: DOCKER_FILE_PATH + displayName: Docker File Path + description: The path to the docker file defining the build. + required: false + value: "Dockerfile.sitemap.local" \ No newline at end of file diff --git a/sample.env b/sample.env index 7ed1e4b6b..c02a58504 100644 --- a/sample.env +++ b/sample.env @@ -125,4 +125,27 @@ DOCUMENTSERVICE_DB_PORT= DOCUMENTSERVICE_DB_USER= DOCUMENTSERVICE_DB_PASSWORD= -BIG_HTTP_GET_TIMEOUT=300000 \ No newline at end of file +BIG_HTTP_GET_TIMEOUT=300000 + +##### Open Info ENVs ##### +FOI_DB_USER= +FOI_DB_PASSWORD= +FOI_DB_NAME= +FOI_DB_HOST= +FOI_DB_PORT= + +OI_REDIS_HOST= +OI_REDIS_PORT= +OI_REDIS_PASSWORD= +OI_QUEUE_NAME=OpenInfoQueue + +OI_S3_ENV= +OI_S3_HOST= +OI_S3_REGION=us-east-1 +OI_S3_BUCKET=openinfopub +OI_ACCESS_KEY= +OI_SECRET_KEY= + +OI_PREFIX=packages/ +SITEMAP_PREFIX=sitemap/ +SITEMAP_PAGES_LIMIT=5000 \ No newline at end of file
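
Local usage note (a sketch only, assuming the Open Info variables above are exported
and a Go toolchain is available): the worker added under
computingservices/OpenInfoServices is driven by a single positional argument, so it
can be exercised locally with, for example:

    cd computingservices/OpenInfoServices
    go run . enqueueforpublish     # queue OI records that are ready to publish
    go run . dequeue               # consume publish/unpublish messages from OI_QUEUE_NAME
    go run . sitemap               # rebuild sitemap_pages_*.xml and sitemap_index.xml in S3
    go run . enqueueforunpublish   # queue OI records to be unpublished

The docker-compose service foi-docreviewer-openinfoservice builds the same code from
Dockerfile.local, while the Dockerfile.enqueueforpublish.local,
Dockerfile.enqueueforunpublish.local and Dockerfile.sitemap.local variants back the
corresponding OpenShift build templates.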