diff --git a/.github/workflows/reusable-copy-to-s3.yml b/.github/workflows/reusable-copy-to-s3.yml
index f89e09a..6784921 100644
--- a/.github/workflows/reusable-copy-to-s3.yml
+++ b/.github/workflows/reusable-copy-to-s3.yml
@@ -70,20 +70,16 @@ jobs:
       - name: get session name
         id: get-session-name
         env:
-          REPO: ${{ github.repository }}
           AWS_ROLE_SESSION_NAME: ${{ inputs.aws-role-session-name }}
-          DIRECTION: ${{ inputs.direction }}
         run: |
-          SESSION_NAME="$(echo "github-actions-$DIRECTION-$REPO" | sed 's,/,--,g' | tr '[[:upper:]]' '[[:lower:]]')"
+          SESSION_NAME=""
           if [ -n "$AWS_ROLE_SESSION_NAME" ]; then
-          SESSION_NAME="$AWS_ROLE_SESSION_NAME"
+            SESSION_NAME="$AWS_ROLE_SESSION_NAME"
           fi
           echo "session-name=$SESSION_NAME" >> $GITHUB_OUTPUT
       - name: validate
         env:
-          REGEXP_S3_BUCKET: "^s3://[a-zA-Z0-9!_.*'()/-]+$"
           CP_OR_SYNC: ${{ inputs.cp-or-sync }}
-          S3_BUCKET: ${{ inputs.s3-bucket }}
           DIRECTION: ${{ inputs.direction }}
         run: |
           ERRORS=false
@@ -91,12 +87,6 @@ jobs:
             echo "error: command must be either 'cp' or 'sync'" >/dev/stderr
             ERRORS=true
           fi
-          # tested with
-          # for BUCKET in $(aws-vault exec prod -- aws s3 ls | awk '{print "s3://" $3}'); do echo $BUCKET | grep -q -E $REGEXP_S3_BUCKET || echo "INVALID BUCKET $BUCKET"; done
-          if ! echo "$S3_BUCKET" | grep -q -E $REGEXP_S3_BUCKET; then
-            echo "error: not valid s3 bucket URI '$S3_BUCKET'" >/dev/stderr
-            ERRORS=true
-          fi
           if ! echo "$DIRECTION" | grep -q -E '^(from|to)$'; then
             echo "error: command must be either 'from' or 'to'" >/dev/stderr
             ERRORS=true
@@ -104,59 +94,29 @@ jobs:
           if [ "$ERRORS" = true ]; then
             exit 1
           fi
-      - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
+      - name: Copy to S3
+        uses: GeoNet/Actions/.github/actions/copy-to-s3@caS3
         if: ${{ inputs.direction == 'to' }}
-        with:
-          name: ${{ inputs.artifact-name }}
-          path: ${{ inputs.artifact-path }}
-      - name: Configure AWS Credentials
-        uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
         with:
           aws-region: ${{ inputs.aws-region }}
-          role-to-assume: ${{ inputs.aws-role-arn-to-assume }}
-          role-duration-seconds: ${{ inputs.aws-role-duration-seconds }}
-          role-session-name: ${{ steps.get-session-name.outputs.session-name }}
-      - name: copy or sync
-        env:
-          CP_OR_SYNC: ${{ inputs.cp-or-sync }}
-          LOCAL_SOURCE_DIR: ${{ inputs.artifact-path }}
-          S3_BUCKET: ${{ inputs.s3-bucket }}
-          DIRECTION: ${{ inputs.direction }}
-        run: |
-          ARGS=()
-          case "$CP_OR_SYNC" in
-            cp)
-              if [ ${{ inputs.single-file }} = false ]; then
-                ARGS+=(--recursive)
-              fi
-              ;;
-            sync)
-
-              ;;
-            *)
-              echo "Yer CI be propper haunted eh matey?"
-          esac
-          case "$DIRECTION" in
-            from)
-              echo "Copying from '$S3_BUCKET' to '$LOCAL_SOURCE_DIR'"
-              ARGS+=("$S3_BUCKET" "$LOCAL_SOURCE_DIR")
-              ;;
-
-            to)
-              echo "Copying from '$LOCAL_SOURCE_DIR' to '$S3_BUCKET'"
-              ARGS+=("$LOCAL_SOURCE_DIR" "$S3_BUCKET")
-              ;;
-
-            *)
-              echo "*ghostly ooooohhhh* this is the ghost of GitHub actions *ghostly ooooohhh*"
-              echo "how did you geeeettt heereeee???"
-              echo "*ghostly oooooohhh*"
-          esac
-          aws s3 "$CP_OR_SYNC" "${ARGS[@]}"
-      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+          aws-role-arn-to-assume: ${{ inputs.aws-role-arn-to-assume }}
+          aws-role-duration-seconds: ${{ inputs.aws-role-duration-seconds }}
+          aws-role-session-name: ${{ steps.get-session-name.outputs.session-name }}
+          use-sync: ${{ inputs.cp-or-sync == 'sync' }}
+          single-file: ${{ inputs.single-file }}
+          artifact-name: ${{ inputs.artifact-name }}
+          artifact-path: ${{ inputs.artifact-path }}
+          s3-bucket-uri: ${{ inputs.s3-bucket }}
+      - name: Copy from S3
+        uses: GeoNet/Actions/.github/actions/copy-from-s3@caS3
         if: ${{ inputs.direction == 'from' }}
         with:
-          name: ${{ inputs.artifact-name }}
-          path: ${{ inputs.artifact-path }}
-          retention-days: 1
-          overwrite: true
+          aws-region: ${{ inputs.aws-region }}
+          aws-role-arn-to-assume: ${{ inputs.aws-role-arn-to-assume }}
+          aws-role-duration-seconds: ${{ inputs.aws-role-duration-seconds }}
+          aws-role-session-name: ${{ steps.get-session-name.outputs.session-name }}
+          use-sync: ${{ inputs.cp-or-sync == 'sync' }}
+          single-file: ${{ inputs.single-file }}
+          artifact-name: ${{ inputs.artifact-name }}
+          artifact-path: ${{ inputs.artifact-path }}
+          s3-bucket-uri: ${{ inputs.s3-bucket }}