diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..2a8eede2 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,13 @@ +root = true + +[*] +end_of_line = lf +charset = utf-8 +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +[*.{yml,yaml}] +indent_style = space +indent_size = 2 diff --git a/.github/workflows/check_deploy.yml b/.github/workflows/check_deploy.yml new file mode 100644 index 00000000..c4c6a2dd --- /dev/null +++ b/.github/workflows/check_deploy.yml @@ -0,0 +1,219 @@ +# For docs on this see: +# * https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions +# * https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request +# * https://docs.github.com/en/webhooks/webhook-events-and-payloads?actionType=synchronize#pull_request +on: + push: + branches: + - main + pull_request: + types: + - opened + - synchronize # when commits are pushed to the PR + - reopened + - edited # title or body of a pull request was edited, or the base branch of a pull request was changed + +env: + tf_actions_working_dir: "./tf/environments/production" + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + +jobs: + ansible: + needs: terraform + runs-on: ubuntu-latest + defaults: + run: + working-directory: ${{ env.tf_actions_working_dir }}/ansible + permissions: + pull-requests: write + steps: + - uses: actions/checkout@v4 + + - name: Install Ansible + run: | + sudo apt update + sudo apt install software-properties-common + sudo add-apt-repository --yes --update ppa:ansible/ansible-9 + sudo apt install -y ansible + + - name: Write devops ssh key to .ssh + run: | + mkdir -p ~/.ssh/ + chmod 700 ~/.ssh/ + echo "${{ secrets.AWS_SSH_KEY }}" > ~/.ssh/ooni-devops-prod.pem + chmod 600 ~/.ssh/ooni-devops-prod.pem + + - name: Run Ansible Playbook + id: playbook + env: + ANSIBLE_SSH_ARGS: "-o 
UserKnownHostsFile=known_hosts" + run: | + echo "ansible_playbook<> "$GITHUB_OUTPUT" + echo "\$ ansible-playbook playbook.yml --check --diff -i inventory.ini" >> "$GITHUB_OUTPUT" + ansible-playbook playbook.yml --check --diff -i inventory.ini --key-file ~/.ssh/ooni-devops-prod.pem 2>&1 | tee -a "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + continue-on-error: true + + # This can be uncommmented to make it possible to ssh into the container to debug the run + #- name: Setup tmate session + # uses: mxschmitt/action-tmate@v3 + + - uses: actions/github-script@v6 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const commentTitle = "Ansible Run Output"; + const ansiblePlaybookOutput = `${{ steps.playbook.outputs.ansible_playbook}}`; + const parts = ansiblePlaybookOutput.split(/PLAY RECAP \*+/); + const ansiblePlaybookRecap = parts.length > 1 ? parts[1].trim() : ''; + + const commentBody = ` + #### Ansible Playbook Recap 🔍 + + \`\`\`\n + ${ansiblePlaybookRecap} + \`\`\` + + #### Ansible playbook output 📖\`${{ steps.playbook.outcome }}\` + +
Show Execution + + \`\`\`\n + ${ansiblePlaybookOutput} + \`\`\` + +
+ + | | | + |-------------------|------------------------------------| + | Pusher | @${{ github.actor }} | + | Action | ${{ github.event_name }} | + | Working Directory | ${{ env.tf_actions_working_dir }} | + | Workflow | ${{ github.workflow }} | + | Last updated | ${(new Date()).toUTCString()} | + `; + + // Call the script to write the comment + const script = require('./scripts/ghactions/comment-on-pr.js'); + await script({github, context, core, commentTitle, commentBody}); + + terraform: + runs-on: ubuntu-latest + if: ${{ !startsWith(github.event.head_commit.message, 'skip-terraform:') }} + defaults: + run: + working-directory: ${{ env.tf_actions_working_dir }} + permissions: + contents: write + pull-requests: write + env: + TF_VAR_aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} + TF_VAR_aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + TF_VAR_datadog_api_key: ${{ secrets.DATADOG_API_KEY }} + + steps: + - uses: actions/checkout@v4 + + - name: Install Terraform + run: | + wget -O- https://apt.releases.hashicorp.com/gpg | sudo gpg --dearmor -o /usr/share/keyrings/hashicorp-archive-keyring.gpg + echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" \ + | sudo tee /etc/apt/sources.list.d/hashicorp.list + sudo apt update && sudo apt install terraform + + - name: Terraform fmt + id: fmt + run: terraform fmt -check + continue-on-error: true + + - name: Terraform Init + id: init + run: terraform init + + - name: Terraform Validate + id: validate + run: | + echo "terraform_validate<> "$GITHUB_OUTPUT" + echo "\$ terraform validate" >> "$GITHUB_OUTPUT" + terraform validate -no-color | tee -a "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + + - name: Terraform Plan + id: plan + run: | + echo "terraform_plan<> "$GITHUB_OUTPUT" + echo "\$ terraform plan" >> "$GITHUB_OUTPUT" + terraform plan -no-color | tee -a "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + continue-on-error: 
true + + # Temporarily disabled, probably should be moved to a deploy action with stricter checks + #- name: Terraform Apply + # id: apply + # run: | + # echo "terraform_apply<> "$GITHUB_OUTPUT" + # echo "\$ terraform apply -auto-approve" >> "$GITHUB_OUTPUT" + # terraform apply -auto-approve -no-color | tee -a "$GITHUB_OUTPUT" + # echo "EOF" >> "$GITHUB_OUTPUT" + # continue-on-error: true + + - uses: actions/github-script@v6 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const terraformPlanOutput = `${{ steps.plan.outputs.terraform_plan }}`; + const terraformApplyOutput = `${{ steps.apply.outputs.terraform_apply }}`; + + const terraformPlanPlanLine = terraformPlanOutput.split('\n').find(line => line.startsWith('Plan:')); + const terraformApplyPlanLine = terraformApplyOutput.split('\n').find(line => line.startsWith('Plan:')); + const terraformApplyApplyLine = terraformApplyOutput.split('\n').find(line => line.startsWith('Apply complete!')); + + const commentTitle = "Terraform Run Output"; + const commentBody = ` + #### Format and Style 🖌\`${{ steps.fmt.outcome }}\` + #### Initialization ⚙️\`${{ steps.init.outcome }}\` + #### Validation 🤖\`${{ steps.validate.outcome }}\` +
Validation Output + + \`\`\`\n + ${{ steps.validate.outputs.terraform_validate }} + \`\`\` + +
+ + #### Plan 📖\`${{ steps.plan.outcome }}\` + * **${terraformPlanPlanLine}** + +
Show Plan + + \`\`\`\n + ${terraformPlanOutput} + \`\`\` + +
+ + #### Apply 📖\`${{ steps.apply.outcome }}\` + * **${terraformApplyPlanLine}** + * **${terraformApplyApplyLine}** + +
Show Apply + + \`\`\`\n + ${terraformApplyOutput} + \`\`\` + +
+ + | | | + |-------------------|------------------------------------| + | Pusher | @${{ github.actor }} | + | Action | ${{ github.event_name }} | + | Working Directory | ${{ env.tf_actions_working_dir }} | + | Workflow | ${{ github.workflow }} | + | Last updated | ${ (new Date()).toUTCString() } | + `; + + // Call the script to write the comment + const script = require('./scripts/ghactions/comment-on-pr.js'); + await script({github, context, core, commentTitle, commentBody}); diff --git a/.gitignore b/.gitignore index 3fa8c86b..4e9a26b4 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,16 @@ -.terraform +# Local .terraform directories +**/.terraform/* + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log +crash.*.log + +# Ignore override files as they are usually used to override resources locally +override.tf +override.tf.json +*_override.tf +*_override.tf.json diff --git a/reverse-proxy/Dockerfile b/reverse-proxy/Dockerfile deleted file mode 100644 index 4a1d8917..00000000 --- a/reverse-proxy/Dockerfile +++ /dev/null @@ -1,2 +0,0 @@ -FROM haproxy:2.3 -COPY haproxy.cfg /usr/local/etc/haproxy/haproxy.cfg diff --git a/reverse-proxy/haproxy.cfg b/reverse-proxy/haproxy.cfg deleted file mode 100644 index 8a7c6274..00000000 --- a/reverse-proxy/haproxy.cfg +++ /dev/null @@ -1,16 +0,0 @@ -defaults - mode http - -frontend api - bind *:80 - - acl path_dataapi_1 path_beg /api/v1/measurements - use_backend dataapi if path_dataapi_1 - - default_backend default_server - -backend dataapi - server render_dataapi dataapi-ig2v:80 - -backend default_server - server backend_fsn backend-fsn.ooni.org:443 ssl verify none diff --git a/scripts/dump-tables-ch.sh b/scripts/dump-tables-ch.sh new file mode 100755 index 00000000..efd11d70 --- /dev/null +++ b/scripts/dump-tables-ch.sh @@ -0,0 +1,46 @@ +#!/bin/bash +set -euxo pipefail +# This is to be run manually on the clickhouse host to dump schemas and table dumps +# You may want to make some tweaks to the dumping 
rules in order avoid dumping +# too much data (eg. fastpath) +# You should then scp the data over to the target host manually, by running: +# $ scp * clickhouse-instance2:/var/lib/clickhouse/ooni-dumps/ +TABLES=( +"fastpath" +"jsonl" +"url_priorities" +"citizenlab" +"citizenlab_flip" +"test_groups" +"accounts" +"session_expunge" +"msmt_feedback" +"fingerprints_dns" +"fingerprints_http" +"asnmeta" +"counters_test_list" +"counters_asn_test_list" +"incidents" +"oonirun" +) + +dump_dir="./dumps" +current_date=$(date +%Y%m%d) + +# Directory to store the dumps +mkdir -p "$dump_dir" + +# Iterate over each table +for table in "${TABLES[@]}"; do + # Define file names for schema and data dump + schema_file="${dump_dir}/${current_date}-${table}_schema.sql" + data_file="${dump_dir}/${current_date}-${table}_dump.clickhouse" + + # Dump the table schema + echo "[+] dumping schema $schema_file" + clickhouse-client --query="SHOW CREATE TABLE ${table} FORMAT TabSeparatedRaw" > "$schema_file" + + # Dump the table data in ClickHouse native format + echo "[+] dumping table data $data_file" + clickhouse-client --query="SELECT * FROM ${table} INTO OUTFILE '${data_file}' FORMAT Native" +done diff --git a/scripts/ghactions/comment-on-pr.js b/scripts/ghactions/comment-on-pr.js new file mode 100644 index 00000000..c4d4131e --- /dev/null +++ b/scripts/ghactions/comment-on-pr.js @@ -0,0 +1,44 @@ +module.exports = async ({ + github, + context, + core, + commentTitle, + commentBody, +}) => { + const body = `## ${commentTitle} 🤖 + ${commentBody} + `; + + const prNumber = context.payload.pull_request.number; + + if (prNumber) { + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + }); + const existingBotComment = comments.find((comment) => { + return ( + comment.user.type === "Bot" && + comment.body.includes(`## ${commentTitle}`) + ); + }); + + if (existingBotComment) { + await 
github.rest.issues.updateComment({ + issue_number: prNumber, + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existingBotComment.id, + body, + }); + } else { + await github.rest.issues.createComment({ + issue_number: prNumber, + owner: context.repo.owner, + repo: context.repo.repo, + body, + }); + } + } +}; diff --git a/scripts/ghactions/update-deploy-status.js b/scripts/ghactions/update-deploy-status.js new file mode 100644 index 00000000..14022329 --- /dev/null +++ b/scripts/ghactions/update-deploy-status.js @@ -0,0 +1,60 @@ +const parseCheckboxStatus = (markdown) => { + const lines = markdown.split("\n"); + const checkboxStatuses = []; + + for (let line of lines) { + // Match lines with checkboxes + const match = line.match(/^\* \[([ x])\] Trigger (.+) deploy/); + if (match) { + const isChecked = match[1] === "x"; // Check if the checkbox is marked as checked + const environment = match[2]; + checkboxStatuses.push({ environment, isChecked }); + } + } + return checkboxStatuses; +}; + +module.exports = async ({ github, context, core, newStatus = {} }) => { + const autogenPlaceholder = + ""; + const currentPrBody = github.context.payload.pull_request?.body || ""; + + const p = currentPrBody.split(autogenPlaceholder); + const existingText = parts[0]; + const existingStatusMarkdown = parts[1]; + let existingStatus; + + if (existingStatusMarkdown) { + existingStatus = parseCheckboxStatus(existingStatus); + } + + const actionLines = ["production", "staging", "testing"] + .map((environment) => { + let checkbox = " "; + let suffix = ""; + if (newStatus[environment] == true) { + checkbox = "x"; + suffix = "✅"; + } + return "* [${checkbox}] Trigger ${environment} deploy ${suffix}"; + }) + .join("\n"); + + const newBody = `${existingText} + ${autogenPlaceholder} + ## Deployment Actions 🚀 + ${actionLines} + `; + + const prNumber = context.payload.pull_request.number; + if (prNumber) { + await github.rest.pulls.update({ + owner, + repo, + pull_number: 
prNumber, + body: newBody, + }); + } + + return existingStatus; +}; diff --git a/scripts/restore-dumps.sh b/scripts/restore-dumps.sh new file mode 100755 index 00000000..df67126e --- /dev/null +++ b/scripts/restore-dumps.sh @@ -0,0 +1,35 @@ +#!/bin/bash +set -euxo pipefail +# Restore schema and sampled dumps to running clickhouse instance +# This script is to be run manually from with CWD set to contains the dumps and +# schema files generated from `dump-tables-ch.sh` +# See dump-tables-ch.sh for instruction on it's usage +for schema_file in *schema.sql;do + cat $schema_file | clickhouse-client; +done + +dump_ts="20240202" +TABLES=( +"jsonl" +"url_priorities" +"citizenlab" +"citizenlab_flip" +"test_groups" +"accounts" +"session_expunge" +"msmt_feedback" +"fingerprints_dns" +"fingerprints_http" +"asnmeta" +"counters_test_list" +"counters_asn_test_list" +"incidents" +"oonirun" +) +for table in "${TABLES[@]}"; do + echo "Restoring ${table}" + cat ${dump_ts}-${table}_dump.clickhouse | clickhouse-client --query="INSERT INTO ${table} FORMAT Native" +done + +echo "Restoring fastpath" +gzip -cd 20240109T1314-fastpath.clickhouse.gz | clickhouse-client --query="INSERT INTO fastpath FORMAT Native" diff --git a/tf/environments/production/.terraform.lock.hcl b/tf/environments/production/.terraform.lock.hcl new file mode 100644 index 00000000..58e338af --- /dev/null +++ b/tf/environments/production/.terraform.lock.hcl @@ -0,0 +1,87 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. 
+ +provider "registry.terraform.io/hashicorp/aws" { + version = "5.35.0" + constraints = ">= 4.9.0, >= 4.66.1" + hashes = [ + "h1:MKNFmhsOIirK7Qzr6TWkVaBcVGN81lCU0BPiaPOeQ8s=", + "h1:fggCACmhwwn6NOo3D6xY6WDyZfBSbMIb47X/MOC+zqE=", + "zh:3a2a6f40db82d30ea8c5e3e251ca5e16b08e520570336e7e342be823df67e945", + "zh:420a23b69b412438a15b8b2e2c9aac2cf2e4976f990f117e4bf8f630692d3949", + "zh:4d8b887f6a71b38cff77ad14af9279528433e279eed702d96b81ea48e16e779c", + "zh:4edd41f8e1c7d29931608a7b01a7ae3d89d6f95ef5502cf8200f228a27917c40", + "zh:6337544e2ded5cf37b55a70aa6ce81c07fd444a2644ff3c5aad1d34680051bdc", + "zh:668faa3faaf2e0758bf319ea40d2304340f4a2dc2cd24460ddfa6ab66f71b802", + "zh:79ddc6d7c90e59fdf4a51e6ea822ba9495b1873d6a9d70daf2eeaf6fc4eb6ff3", + "zh:885822027faf1aa57787f980ead7c26e7d0e55b4040d926b65709b764f804513", + "zh:8c50a8f397b871388ff2e048f5eb280af107faa2e8926694f1ffd9f32a7a7cdf", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:a2f5d2553df5573a060641f18ee7585587047c25ba73fd80617f59b5893d22b4", + "zh:c43833ae2a152213ee92eb5be7653f9493779eddbe0ce403ea49b5f1d87fd766", + "zh:dab01527a3a55b4f0f958af6f46313d775e27f9ad9d10bedbbfea4a35a06dc5f", + "zh:ed49c65620ec42718d681a7fc00c166c295ff2795db6cede2c690b83f9fb3e65", + "zh:f0a358c0ae1087c466d0fbcc3b4da886f33f881a145c3836ec43149878b86a1a", + ] +} + +provider "registry.terraform.io/hashicorp/local" { + version = "2.4.1" + constraints = ">= 2.0.0" + hashes = [ + "h1:FzraUapGrJoH3ZOWiUT2m6QpZAD+HmU+JmqZgM4/o2Y=", + "h1:gpp25uNkYJYzJVnkyRr7RIBVfwLs9GSq2HNnFpTRBg0=", + "zh:244b445bf34ddbd167731cc6c6b95bbed231dc4493f8cc34bd6850cfe1f78528", + "zh:3c330bdb626123228a0d1b1daa6c741b4d5d484ab1c7ae5d2f48d4c9885cc5e9", + "zh:5ff5f9b791ddd7557e815449173f2db38d338e674d2d91800ac6e6d808de1d1d", + "zh:70206147104f4bf26ae67d730c995772f85bf23e28c2c2e7612c74f4dae3c46f", + "zh:75029676993accd6bef933c196b2fad51a9ec8a69a847dbbe96ec8ebf7926cdc", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + 
"zh:7d48d5999fe1fcdae9295a7c3448ac1541f5a24c474bd82df6d4fa3732483f2b", + "zh:b766b38b027f0f84028244d1c2f990431a37d4fc3ac645962924554016507e77", + "zh:bfc7ad301dada204cf51c59d8bd6a9a87de5fddb42190b4d6ba157d6e08a1f10", + "zh:c902b527702a8c5e2c25a6637d07bbb1690cb6c1e63917a5f6dc460efd18d43f", + "zh:d68ae0e1070cf429c46586bc87580c3ed113f76241da2b6e4f1a8348126b3c46", + "zh:f4903fd89f7c92a346ae9e666c2d0b6884c4474ae109e9b4bd15e7efaa4bfc29", + ] +} + +provider "registry.terraform.io/hashicorp/null" { + version = "3.2.2" + hashes = [ + "h1:IMVAUHKoydFrlPrl9OzasDnw/8ntZFerCC9iXw1rXQY=", + "zh:3248aae6a2198f3ec8394218d05bd5e42be59f43a3a7c0b71c66ec0df08b69e7", + "zh:32b1aaa1c3013d33c245493f4a65465eab9436b454d250102729321a44c8ab9a", + "zh:38eff7e470acb48f66380a73a5c7cdd76cc9b9c9ba9a7249c7991488abe22fe3", + "zh:4c2f1faee67af104f5f9e711c4574ff4d298afaa8a420680b0cb55d7bbc65606", + "zh:544b33b757c0b954dbb87db83a5ad921edd61f02f1dc86c6186a5ea86465b546", + "zh:696cf785090e1e8cf1587499516b0494f47413b43cb99877ad97f5d0de3dc539", + "zh:6e301f34757b5d265ae44467d95306d61bef5e41930be1365f5a8dcf80f59452", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:913a929070c819e59e94bb37a2a253c228f83921136ff4a7aa1a178c7cce5422", + "zh:aa9015926cd152425dbf86d1abdbc74bfe0e1ba3d26b3db35051d7b9ca9f72ae", + "zh:bb04798b016e1e1d49bcc76d62c53b56c88c63d6f2dfe38821afef17c416a0e1", + "zh:c23084e1b23577de22603cff752e59128d83cfecc2e6819edadd8cf7a10af11e", + ] +} + +provider "registry.terraform.io/hashicorp/time" { + version = "0.10.0" + constraints = ">= 0.7.1" + hashes = [ + "h1:EeF/Lb4db1Kl1HEHzT1StTC7RRqHn/eB7aDR3C3yjVg=", + "h1:NAl8eupFAZXCAbE5uiHZTz+Yqler55B3fMG+jNPrjjM=", + "zh:0ab31efe760cc86c9eef9e8eb070ae9e15c52c617243bbd9041632d44ea70781", + "zh:0ee4e906e28f23c598632eeac297ab098d6d6a90629d15516814ab90ad42aec8", + "zh:3bbb3e9da728b82428c6f18533b5b7c014e8ff1b8d9b2587107c966b985e5bcc", + "zh:6771c72db4e4486f2c2603c81dfddd9e28b6554d1ded2996b4cb37f887b467de", + 
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:833c636d86c2c8f23296a7da5d492bdfd7260e22899fc8af8cc3937eb41a7391", + "zh:c545f1497ae0978ffc979645e594b57ff06c30b4144486f4f362d686366e2e42", + "zh:def83c6a85db611b8f1d996d32869f59397c23b8b78e39a978c8a2296b0588b2", + "zh:df9579b72cc8e5fac6efee20c7d0a8b72d3d859b50828b1c473d620ab939e2c7", + "zh:e281a8ecbb33c185e2d0976dc526c93b7359e3ffdc8130df7422863f4952c00e", + "zh:ecb1af3ae67ac7933b5630606672c94ec1f54b119bf77d3091f16d55ab634461", + "zh:f8109f13e07a741e1e8a52134f84583f97a819e33600be44623a21f6424d6593", + ] +} diff --git a/tf/environments/production/ansible/inventory.ini b/tf/environments/production/ansible/inventory.ini new file mode 100755 index 00000000..d81954c4 --- /dev/null +++ b/tf/environments/production/ansible/inventory.ini @@ -0,0 +1,7 @@ +# Do not edit! +# autogenerated by terraform in ooni/devops +[all] +clickhouse.tier1.prod.ooni.nu + +[clickhouse_servers] +clickhouse.tier1.prod.ooni.nu diff --git a/tf/environments/production/ansible/known_hosts b/tf/environments/production/ansible/known_hosts new file mode 100644 index 00000000..b5a89f44 --- /dev/null +++ b/tf/environments/production/ansible/known_hosts @@ -0,0 +1,5 @@ +# Do not edit! 
+# this file is automatically generated by update_known_hosts.sh in ooni/devops +clickhouse.tier1.prod.ooni.nu ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC9s+1vPUwzfVYhXVPtzgFEGdmGy+9f1XSt+nvbdkEwENGD4zhcj+zUb/RZKmXphKkzdS2/7JDUzQgArlrMUsTGClMGUO7OQgh8DL3br6E20YDKCKCA/qwVeqfsM0Ho8/6KqMn7TBVeKDz+iJXdF+xMuLrWTRXl5borw22esdahdFAkUS3Z/+yjYolayvbsK7hYpdoYUcJwCZXn4VUUi2hXhf0+DQBDyABSCoUqLzbKTw/EHS9FverS6FwhrrUDCmYPgeenR5GfN1++4ABEOa9p9lPKIL1WURww09h4/D29NIaZh6QS/tIUBqfBl5pW3n9YsUse/T9cAwDQ6ZfU7x0tl5uQCW2KOzYjDLc21VLg+fXQpFV7Jwr6dPmxyugpgDqNfg4a9vvF3PTcbgaC84Ji7tLFF7LjFv/JsJA8q8vXJIbu6luH4ACXv/YSYhW4VgThsj4id1XGU+aZGdBawny2WvJ5Pzogwhh1zo6r7ayx7s+eWw/bUfN7WQPvBKnPTds= +clickhouse.tier1.prod.ooni.nu ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLXyX4o06DPdN8AhZaa+3UUrip5ni4yFi9alVX3w8f4V9TVjKaCJDLKX1MSFGMhcfJs8PtddoI1NiF50VVSS/3A= +clickhouse.tier1.prod.ooni.nu ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHj/mvQ53gz3ZSGt2XUjGWOOP4bZkCCwN/rboHD6AL0y diff --git a/tf/environments/production/ansible/playbook.yml b/tf/environments/production/ansible/playbook.yml new file mode 100644 index 00000000..68a9fe07 --- /dev/null +++ b/tf/environments/production/ansible/playbook.yml @@ -0,0 +1,15 @@ +--- +- name: ClickHouse servers + hosts: clickhouse_servers + user: admin + become: yes + vars: + clickhouse_reader_password: "{{ lookup('env', 'CLICKHOUSE_READER_PASSWORD') }}" + roles: + - clickhouse + handlers: + - name: restart clickhouse-server + service: + name: clickhouse-server + state: restarted + diff --git a/tf/environments/production/ansible/roles/clickhouse/tasks/main.yml b/tf/environments/production/ansible/roles/clickhouse/tasks/main.yml new file mode 100644 index 00000000..6680f947 --- /dev/null +++ b/tf/environments/production/ansible/roles/clickhouse/tasks/main.yml @@ -0,0 +1,74 @@ +- name: install clickhouse requirements + tags: clickhouse + apt: + cache_valid_time: 86400 + state: present + name: + - apt-transport-https + - ca-certificates + - dirmngr + +- 
name: Check if ClickHouse GPG keyring exists + ansible.builtin.stat: + path: /usr/share/keyrings/clickhouse-keyring.gpg + register: keyring_check + +- name: Create a temporary directory for GPG + ansible.builtin.tempfile: + state: directory + register: gnupg_temp_dir + when: not keyring_check.stat.exists + +- name: Import ClickHouse GPG key + ansible.builtin.command: + cmd: "gpg --no-default-keyring --keyring /usr/share/keyrings/clickhouse-keyring.gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 8919F6BD2B48D754" + chdir: "{{ gnupg_temp_dir.path }}" + creates: "/usr/share/keyrings/clickhouse-keyring.gpg" + environment: + GNUPGHOME: "{{ gnupg_temp_dir.path }}" + when: not keyring_check.stat.exists + +- name: Remove temporary directory + ansible.builtin.file: + path: "{{ gnupg_temp_dir.path }}" + state: absent + when: not keyring_check.stat.exists + +- name: Ensure the keyring is readable + ansible.builtin.file: + path: /usr/share/keyrings/clickhouse-keyring.gpg + mode: a+r + +- name: Add ClickHouse repository + ansible.builtin.apt_repository: + repo: "deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb stable main" + state: present + filename: clickhouse + +- name: Install ClickHouse server and client + ansible.builtin.apt: + name: + - clickhouse-server={{ clickhouse_pkg_ver }} + - clickhouse-client={{ clickhouse_pkg_ver }} + - clickhouse-common-static={{ clickhouse_pkg_ver }} + state: present + update_cache: yes + vars: + clickhouse_pkg_ver: 24.1.* + +- name: Ensure ClickHouse service is started and enabled + ansible.builtin.systemd: + name: clickhouse-server + state: started + enabled: yes + +- name: Configure ClickHouse users from template + template: + src: templates/ooni_users.xml + dest: /etc/clickhouse-server/users.d/ooni_users.xml + owner: clickhouse + group: clickhouse + mode: '0640' + notify: + - restart clickhouse-server + diff --git 
a/tf/environments/production/ansible/roles/clickhouse/templates/ooni_users.xml b/tf/environments/production/ansible/roles/clickhouse/templates/ooni_users.xml new file mode 100644 index 00000000..26081944 --- /dev/null +++ b/tf/environments/production/ansible/roles/clickhouse/templates/ooni_users.xml @@ -0,0 +1,30 @@ + + + + + 1 + + + + + + + 1 + admin + + 127.0.0.1 + + + + + + readonly + + 0.0.0.0 + + {{ clickhouse_reader_password }} + + + + + diff --git a/tf/environments/production/tier1/main.tf b/tf/environments/production/main.tf similarity index 60% rename from tf/environments/production/tier1/main.tf rename to tf/environments/production/main.tf index 83fdd502..0fc073ea 100644 --- a/tf/environments/production/tier1/main.tf +++ b/tf/environments/production/main.tf @@ -1,15 +1,71 @@ -provider "aws" { - region = var.aws_region - access_key = var.aws_access_key - secret_key = var.aws_secret_access_key +# Store terraform state in s3 +terraform { + backend "s3" { + region = "eu-central-1" + bucket = "ooni-production-terraform-state" + key = "terraform.tfstate" + profile = "" + encrypt = "true" + + dynamodb_table = "ooni-production-terraform-state-lock" + } } -data "aws_availability_zones" "available" {} +# You cannot create a new backend by simply defining this and then +# immediately proceeding to "terraform apply". The S3 backend must +# be bootstrapped according to the simple yet essential procedure in +# https://github.com/cloudposse/terraform-aws-tfstate-backend#usage +# You cannot create a new backend by simply defining this and then +# immediately proceeding to "terraform apply". 
The S3 backend must +# be bootstrapped according to the simple yet essential procedure in +# https://github.com/cloudposse/terraform-aws-tfstate-backend#usage +module "terraform_state_backend" { + source = "cloudposse/tfstate-backend/aws" + version = "1.4.0" + namespace = "ooni" + stage = "production" + name = "terraform" + attributes = ["state"] + + #terraform_backend_config_file_path = "." + terraform_backend_config_file_name = "backend.tf" + force_destroy = false +} +## Ansible inventory + +resource "local_file" "ansible_inventory" { + depends_on = [ + aws_route53_record.clickhouse_dns + ] + + content = templatefile("${path.module}/templates/ansible-inventory.tpl", { + clickhouse_servers = [ + aws_route53_record.clickhouse_dns.name + ] + }) + filename = "${path.module}/ansible/inventory.ini" +} + +resource "null_resource" "ansible_update_known_hosts" { + depends_on = [local_file.ansible_inventory] + + provisioner "local-exec" { + command = "./scripts/update_known_hosts.sh" + environment = { + INVENTORY_FILE = "ansible/inventory.ini" + KNOWN_HOSTS_FILE = "ansible/known_hosts" + } + } +} + +# Local variable definitions locals { - environment = "production" - name = "ooni-tier1-${local.environment}" + environment = "production" + name = "ooni-tier1-${local.environment}" ecs_cluster_name = "ooni-ecs-cluster" + dns_zone_ooni_nu = "Z035992527R8VEIX2UVO0" # ooni.nu hosted zone + dns_zone_ooni_io = "Z02418652BOD91LFA5S9X" # ooni.io hosted zone tags = { Name = local.name @@ -17,6 +73,16 @@ locals { } } +## AWS Setup + +provider "aws" { + region = var.aws_region + access_key = var.aws_access_key_id + secret_key = var.aws_secret_access_key +} + +data "aws_availability_zones" "available" {} + resource "aws_vpc" "main" { cidr_block = "10.0.0.0/16" } @@ -51,7 +117,7 @@ resource "aws_route_table_association" "a" { ### EC2 locals { - clickhouse_hostname = "clickhouse.tier1.prod.ooni.nu" + clickhouse_hostname = "clickhouse.tier1.prod.ooni.nu" clickhouse_device_name = "/dev/sdf" 
} @@ -72,13 +138,13 @@ data "aws_ami" "debian_ami" { } resource "aws_instance" "clickhouse_server_prod_tier1" { - ami = data.aws_ami.debian_ami.id - instance_type = "r5.xlarge" - key_name = var.key_name + ami = data.aws_ami.debian_ami.id + instance_type = "r5.xlarge" + key_name = var.key_name associate_public_ip_address = true - subnet_id = aws_subnet.main[0].id + subnet_id = aws_subnet.main[0].id vpc_security_group_ids = [aws_security_group.clickhouse_sg.id] root_block_device { @@ -87,25 +153,30 @@ resource "aws_instance" "clickhouse_server_prod_tier1" { } user_data = templatefile("${path.module}/templates/clickhouse-setup.sh", { - datadog_api_key = var.datadog_api_key, - hostname = local.clickhouse_hostname, - device_name = local.clickhouse_device_name + datadog_api_key = var.datadog_api_key, + hostname = local.clickhouse_hostname, + device_name = local.clickhouse_device_name }) - - tags = local.tags + + tags = merge( + local.tags, + { + Name = "clickhouse-${local.tags["Name"]}" + } + ) } resource "aws_ebs_volume" "clickhouse_data_volume" { availability_zone = aws_instance.clickhouse_server_prod_tier1.availability_zone - size = 1024 # 1 TB + size = 1024 # 1 TB type = "gp3" # SSD-based volume type, provides up to 16,000 IOPS and 1,000 MiB/s throughput - tags = local.tags + tags = local.tags } resource "aws_volume_attachment" "clickhouse_data_volume_attachment" { - device_name = local.clickhouse_device_name - volume_id = aws_ebs_volume.clickhouse_data_volume.id - instance_id = aws_instance.clickhouse_server_prod_tier1.id + device_name = local.clickhouse_device_name + volume_id = aws_ebs_volume.clickhouse_data_volume.id + instance_id = aws_instance.clickhouse_server_prod_tier1.id force_detach = true } @@ -115,14 +186,6 @@ resource "aws_eip" "clickhouse_ip" { tags = local.tags } -resource "aws_route53_record" "clickhouse_dns" { - zone_id = "Z035992527R8VEIX2UVO0" # ooni.nu hosted zone - name = local.clickhouse_hostname - type = "A" - ttl = "300" - records = 
[aws_eip.clickhouse_ip.public_ip] -} - resource "aws_security_group" "clickhouse_sg" { name = "clickhouse_sg" description = "Allow Clickhouse traffic" @@ -163,19 +226,19 @@ data "aws_ssm_parameter" "ecs_optimized_ami" { } resource "aws_launch_template" "app" { - name_prefix = "ooni-tier1-production-backend-lt" + name_prefix = "ooni-tier1-production-backend-lt" - key_name = var.key_name - image_id = jsondecode(data.aws_ssm_parameter.ecs_optimized_ami.value)["image_id"] - instance_type = var.instance_type + key_name = var.key_name + image_id = jsondecode(data.aws_ssm_parameter.ecs_optimized_ami.value)["image_id"] + instance_type = var.instance_type - user_data = base64encode(templatefile("${path.module}/templates/ecs-setup.sh", { - ecs_cluster_name = local.ecs_cluster_name, - ecs_cluster_tags = local.tags, - datadog_api_key = var.datadog_api_key, + user_data = base64encode(templatefile("${path.module}/templates/ecs-setup.sh", { + ecs_cluster_name = local.ecs_cluster_name, + ecs_cluster_tags = local.tags, + datadog_api_key = var.datadog_api_key, })) - update_default_version = true + update_default_version = true instance_initiated_shutdown_behavior = "terminate" iam_instance_profile { @@ -184,7 +247,7 @@ resource "aws_launch_template" "app" { network_interfaces { associate_public_ip_address = true - delete_on_termination = true + delete_on_termination = true security_groups = [ aws_security_group.instance_sg.id, ] @@ -201,20 +264,20 @@ resource "aws_launch_template" "app" { tag_specifications { resource_type = "instance" tags = { - Name: "ooni-tier1-production-backend" + Name : "ooni-tier1-production-backend" } } } resource "aws_autoscaling_group" "app" { - name_prefix = "ooni-tier1-production-backend-asg" - vpc_zone_identifier = aws_subnet.main[*].id - min_size = var.asg_min - max_size = var.asg_max - desired_capacity = var.asg_desired - - launch_template { - id = aws_launch_template.app.id + name_prefix = "ooni-tier1-production-backend-asg" + vpc_zone_identifier = 
aws_subnet.main[*].id + min_size = var.asg_min + max_size = var.asg_max + desired_capacity = var.asg_desired + + launch_template { + id = aws_launch_template.app.id version = "$Latest" } @@ -226,10 +289,8 @@ resource "aws_autoscaling_group" "app" { triggers = ["tag"] } - } - ### Security resource "aws_security_group" "lb_sg" { @@ -245,6 +306,13 @@ resource "aws_security_group" "lb_sg" { cidr_blocks = ["0.0.0.0/0"] } + ingress { + protocol = "tcp" + from_port = 443 + to_port = 443 + cidr_blocks = ["0.0.0.0/0"] + } + egress { from_port = 0 to_port = 0 @@ -302,17 +370,15 @@ resource "aws_ecs_cluster" "main" { locals { - container_image = "ooni/dataapi:latest" container_name = "ooni_dataapi" - container_port = 80 } resource "aws_ecs_task_definition" "dataapi" { family = "ooni-dataapi-production-td" container_definitions = templatefile("${path.module}/templates/task_definition.json", { - image_url = local.container_image, + image_url = "ooni/dataapi:${var.ooni_service_config.dataapi_version}", container_name = local.container_name, - container_port = local.container_port, + container_port = 80, log_group_region = var.aws_region, log_group_name = aws_cloudwatch_log_group.app.name }) @@ -471,6 +537,78 @@ resource "aws_alb_listener" "front_end" { tags = local.tags } +resource "aws_alb_listener" "front_end_https" { + load_balancer_arn = aws_alb.main.id + port = "443" + protocol = "HTTPS" + ssl_policy = "ELBSecurityPolicy-2016-08" + certificate_arn = aws_acm_certificate_validation.dataapi.certificate_arn + + default_action { + target_group_arn = aws_alb_target_group.dataapi.id + type = "forward" + } + + tags = local.tags +} + +# Route53 + +resource "aws_route53_record" "clickhouse_dns" { + zone_id = local.dns_zone_ooni_nu + name = local.clickhouse_hostname + type = "A" + ttl = "300" + records = [aws_eip.clickhouse_ip.public_ip] +} + +resource "aws_route53_record" "alb_dns" { + zone_id = local.dns_zone_ooni_io + name = "dataapi.prod.ooni.io" + type = "A" + + alias { + name = 
aws_alb.main.dns_name + zone_id = aws_alb.main.zone_id + evaluate_target_health = true + } +} + +# ACM TLS + +resource "aws_acm_certificate" "dataapi" { + domain_name = "dataapi.prod.ooni.io" + validation_method = "DNS" + + tags = local.tags + + lifecycle { + create_before_destroy = true + } +} + +resource "aws_route53_record" "dataapi_cert_validation" { + for_each = { + for dvo in aws_acm_certificate.dataapi.domain_validation_options : dvo.domain_name => { + name = dvo.resource_record_name + record = dvo.resource_record_value + type = dvo.resource_record_type + } + } + + allow_overwrite = true + name = each.value.name + records = [each.value.record] + ttl = 60 + type = each.value.type + zone_id = local.dns_zone_ooni_io +} + +resource "aws_acm_certificate_validation" "dataapi" { + certificate_arn = aws_acm_certificate.dataapi.arn + validation_record_fqdns = [for record in aws_route53_record.dataapi_cert_validation : record.fqdn] +} + ## CloudWatch Logs resource "aws_cloudwatch_log_group" "ecs" { diff --git a/tf/environments/production/tier1/outputs.tf b/tf/environments/production/outputs.tf similarity index 98% rename from tf/environments/production/tier1/outputs.tf rename to tf/environments/production/outputs.tf index 354ee8ba..0119e800 100644 --- a/tf/environments/production/tier1/outputs.tf +++ b/tf/environments/production/outputs.tf @@ -12,4 +12,5 @@ output "asg_name" { output "elb_hostname" { value = aws_alb.main.dns_name -} \ No newline at end of file +} + diff --git a/tf/environments/production/scripts/update_known_hosts.sh b/tf/environments/production/scripts/update_known_hosts.sh new file mode 100755 index 00000000..c08f6d52 --- /dev/null +++ b/tf/environments/production/scripts/update_known_hosts.sh @@ -0,0 +1,37 @@ +#!/bin/bash +set -euxo pipefail +# This script updates the known_hosts file with the SSH host keys of all hosts +# in the ansible inventory. 
It should be run after the hosts have been added to +# the inventory as part of the terraform provisioning process. + +# To run you can override the INVENTORY_FILE and KNOWN_HOSTS_FILE variables +# with a specific path +# You can also force the update by setting FORCE_UPDATE to true: +# FORCE_UPDATE=true ./update_known_hosts.sh +INVENTORY_FILE="${INVENTORY_FILE:-ansible/inventory.ini}" +KNOWN_HOSTS_FILE="${KNOWN_HOSTS_FILE:-ansible/known_hosts}" +FORCE_UPDATE="${FORCE_UPDATE:-false}" + +# fetch SSH host keys and update known_hosts +update_known_hosts() { + local host=$1 + if [ "$FORCE_UPDATE" = true ]; then + echo "Forcing update of known_hosts for $host" + # Check if the host already exists in known_hosts + elif grep -q -F "$host" "$KNOWN_HOSTS_FILE"; then + echo "$host already exists in known_hosts at `$KNOWN_HOSTS_FILE`" + return + fi + # store new keys + ssh-keyscan $host >> "$KNOWN_HOSTS_FILE" +} + +# Main loop to iterate over hosts in the inventory +while read -r line; do + # ignore comments and empty lines + if [[ $line =~ ^[a-zA-Z0-9] ]]; then + update_known_hosts $line + fi +# only look at the [all] group +done < <(awk '/^\[/{p=0}/\[all\]/{p=1}p' $INVENTORY_FILE | grep -v '\[' | awk '{print $1}') + diff --git a/tf/environments/production/templates/ansible-inventory.tpl b/tf/environments/production/templates/ansible-inventory.tpl new file mode 100644 index 00000000..f30da3c5 --- /dev/null +++ b/tf/environments/production/templates/ansible-inventory.tpl @@ -0,0 +1,12 @@ +# Do not edit! 
+# autogenerated by terraform in ooni/devops +[all] +%{ for hostname in clickhouse_servers ~} +${hostname} +%{ endfor ~} + +[clickhouse_servers] +%{ for hostname in clickhouse_servers ~} +${hostname} +%{ endfor ~} + diff --git a/tf/environments/production/templates/clickhouse-setup.sh b/tf/environments/production/templates/clickhouse-setup.sh new file mode 100644 index 00000000..50a307cc --- /dev/null +++ b/tf/environments/production/templates/clickhouse-setup.sh @@ -0,0 +1,12 @@ +#!/bin/bash +sudo hostnamectl set-hostname --static ${hostname} + +# Install datadog agent +DD_API_KEY=${datadog_api_key} DD_SITE="datadoghq.eu" bash -c "$(curl -L https://s3.amazonaws.com/dd-agent/scripts/install_script_agent7.sh)" + +sudo mkfs.ext4 -q -F ${device_name} +sudo mkdir -p /var/lib/clickhouse +sudo mount ${device_name} /var/lib/clickhouse +echo "${device_name} /var/lib/clickhouse ext4 defaults,nofail 0 2" | sudo tee -a /etc/fstab +sudo chown -R clickhouse:clickhouse /var/lib/clickhouse + diff --git a/tf/environments/production/tier1/templates/ecs-setup.sh b/tf/environments/production/templates/ecs-setup.sh similarity index 96% rename from tf/environments/production/tier1/templates/ecs-setup.sh rename to tf/environments/production/templates/ecs-setup.sh index ef1de413..00acf10d 100644 --- a/tf/environments/production/tier1/templates/ecs-setup.sh +++ b/tf/environments/production/templates/ecs-setup.sh @@ -8,4 +8,5 @@ ECS_ENABLE_TASK_IAM_ROLE=true EOF # Install datadog agent -DD_API_KEY=${datadog_api_key} DD_SITE="datadoghq.eu" bash -c "$(curl -L https://s3.amazonaws.com/dd-agent/scripts/install_script_agent7.sh)" \ No newline at end of file +DD_API_KEY=${datadog_api_key} DD_SITE="datadoghq.eu" bash -c "$(curl -L https://s3.amazonaws.com/dd-agent/scripts/install_script_agent7.sh)" + diff --git a/tf/environments/production/tier1/templates/instance_profile_policy.json b/tf/environments/production/templates/instance_profile_policy.json similarity index 100% rename from 
tf/environments/production/tier1/templates/instance_profile_policy.json rename to tf/environments/production/templates/instance_profile_policy.json diff --git a/tf/environments/production/tier1/templates/task_definition.json b/tf/environments/production/templates/task_definition.json similarity index 99% rename from tf/environments/production/tier1/templates/task_definition.json rename to tf/environments/production/templates/task_definition.json index ecd17bea..81d5d7ff 100644 --- a/tf/environments/production/tier1/templates/task_definition.json +++ b/tf/environments/production/templates/task_definition.json @@ -20,3 +20,4 @@ } } ] + diff --git a/tf/environments/production/terraform.tfvars.json b/tf/environments/production/terraform.tfvars.json new file mode 100644 index 00000000..64b1c656 --- /dev/null +++ b/tf/environments/production/terraform.tfvars.json @@ -0,0 +1,5 @@ +{ + "ooni_service_config": { + "dataapi_version": "v0.2.0.dev1" + } +} diff --git a/tf/environments/production/tier1/.terraform.lock.hcl b/tf/environments/production/tier1/.terraform.lock.hcl deleted file mode 100644 index 945e0a5a..00000000 --- a/tf/environments/production/tier1/.terraform.lock.hcl +++ /dev/null @@ -1,25 +0,0 @@ -# This file is maintained automatically by "terraform init". -# Manual edits may be lost in future updates. 
- -provider "registry.terraform.io/hashicorp/aws" { - version = "5.33.0" - constraints = ">= 3.29.0, >= 4.57.0, >= 4.66.1, >= 5.20.0, >= 5.31.0" - hashes = [ - "h1:kPm7PkwHh6tZ74pUj5C/QRPtauxdnzrEG2yhCJla/4o=", - "zh:10bb683f2a9306e881f51a971ad3b2bb654ac94b54945dd63769876a343b5b04", - "zh:3916406db958d5487ea0c2d2320012d1907c29e6d01bf693560fe05e38ee0601", - "zh:3cb54b76b2f9e30620f3281ab7fb20633b1e4584fc84cc4ecd5752546252e86f", - "zh:513bcfd6971482215c5d64725189f875cbcbd260c6d11f0da4d66321efd93a92", - "zh:545a34427ebe7a950056627e7c980c9ba16318bf086d300eb808ffc41c52b7a8", - "zh:5a44b90faf1c8e8269f389c04bfac25ad4766d26360e7f7ac371be12a442981c", - "zh:64e1ef83162f78538dccad8b035577738851395ba774d6919cb21eb465a21e3a", - "zh:7315c70cb6b7f975471ea6129474639a08c58c071afc95a36cfaa41a13ae7fb9", - "zh:9806faae58938d638b757f54414400be998dddb45edfd4a29c85e827111dc93d", - "zh:997fa2e2db242354d9f772fba7eb17bd6d18d28480291dd93f85a18ca0a67ac2", - "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", - "zh:9f9e076b7e9752971f39eead6eda69df1c5e890c82ba2ca95f56974af7adfe79", - "zh:b1d6af047f96de7f97d38b685654f1aed4356d5060b0e696d87d0270f5d49f75", - "zh:bfb0654b6f34398aeffdf907b744af06733d168db610a2c5747263380f817ac7", - "zh:e25203ee8cedccf60bf450950d533d3c172509bda8af97dbc3bc817d2a503c57", - ] -} diff --git a/tf/environments/production/tier1/templates/clickhouse-setup.sh b/tf/environments/production/tier1/templates/clickhouse-setup.sh deleted file mode 100644 index ddf49f13..00000000 --- a/tf/environments/production/tier1/templates/clickhouse-setup.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -export DEBIAN_FRONTEND=noninteractive -sudo hostnamectl set-hostname --static ${hostname} - -# Install datadog agent -DD_API_KEY=${datadog_api_key} DD_SITE="datadoghq.eu" bash -c "$(curl -L https://s3.amazonaws.com/dd-agent/scripts/install_script_agent7.sh)" - -# Install clickhouse following the instructions at: https://clickhouse.com/docs/en/install -sudo apt-get install -y 
apt-transport-https ca-certificates dirmngr -GNUPGHOME=$(mktemp -d) -sudo GNUPGHOME="$GNUPGHOME" gpg --no-default-keyring --keyring /usr/share/keyrings/clickhouse-keyring.gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 8919F6BD2B48D754 -sudo rm -rf "$GNUPGHOME" -sudo chmod +r /usr/share/keyrings/clickhouse-keyring.gpg -echo "deb [signed-by=/usr/share/keyrings/clickhouse-keyring.gpg] https://packages.clickhouse.com/deb stable main" | sudo tee \ -/etc/apt/sources.list.d/clickhouse.list -sudo apt-get update -sudo apt install -y clickhouse-server clickhouse-client -sudo systemctl start clickhouse-server -sudo systemctl enable clickhouse-server - -# Configure the ebs data volume -sudo service clickhouse-server stop -sudo mkfs.ext4 -q -F ${device_name} -sudo mkdir -p /var/lib/clickhouse -sudo mount ${device_name} /var/lib/clickhouse -echo "${device_name} /var/lib/clickhouse ext4 defaults,nofail 0 2" | sudo tee -a /etc/fstab -sudo chown -R clickhouse:clickhouse /var/lib/clickhouse -sudo service clickhouse-server start \ No newline at end of file diff --git a/tf/environments/production/tier1/variables.tf b/tf/environments/production/variables.tf similarity index 70% rename from tf/environments/production/tier1/variables.tf rename to tf/environments/production/variables.tf index 7d7e56e6..98813410 100644 --- a/tf/environments/production/tier1/variables.tf +++ b/tf/environments/production/variables.tf @@ -1,6 +1,12 @@ -variable datadog_api_key {} -variable aws_access_key {} -variable aws_secret_access_key {} +variable "datadog_api_key" { + sensitive = true +} +variable "aws_access_key_id" { + sensitive = true +} +variable "aws_secret_access_key" { + sensitive = true +} variable "aws_region" { description = "The AWS region to create things in." 
@@ -15,7 +21,17 @@ variable "az_count" { variable "key_name" { description = "Name of AWS key pair" - default = "ec2-arturo" + default = "ooni-devops-prod" +} + +variable "ooni_service_config" { + type = object({ + dataapi_version = string + }) + default = { + dataapi_version = "latest" + } + description = "configuration for ooni services" } variable "instance_type" { diff --git a/tf/environments/production/tier1/versions.tf b/tf/environments/production/versions.tf similarity index 100% rename from tf/environments/production/tier1/versions.tf rename to tf/environments/production/versions.tf