Skip to content

Commit

Permalink
feat: update karpenter to 0.32
Browse files Browse the repository at this point in the history
  • Loading branch information
jaygridley committed Jan 16, 2024
1 parent ec4009f commit 49b9506
Show file tree
Hide file tree
Showing 22 changed files with 1,101 additions and 110 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/pre-commit.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ on:
env:
PYTHON_VERSION: "3.10"
TERRAFORM_DOCS_VERSION: "v0.16.0"
TFLINT_VERSION: "v0.40.1"
TFLINT_VERSION: "v0.48.0"

jobs:
pre-commit:
Expand Down
10 changes: 7 additions & 3 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
rev: v4.4.0
hooks:
- id: trailing-whitespace
- id: check-merge-conflict
Expand All @@ -10,18 +10,22 @@ repos:
- id: end-of-file-fixer

- repo: https://github.com/antonbabenko/pre-commit-terraform
rev: v1.75.0
rev: v1.83.1
hooks:
- id: terraform_fmt
- id: terraform_tflint
args:
- --args=--config=__GIT_WORKING_DIR__/.tflint.hcl
- id: terraform_validate
- id: terraform_checkov
args:
- '--args=--skip-check CKV_TF_1' #CKV_TF_1: "Ensure Terraform module sources use a commit hash"
- id: terraform_docs
args:
- '--args=--config=.terraform-docs.yml'

- repo: https://github.com/Yelp/detect-secrets
rev: v1.3.0
rev: v1.4.0
hooks:
- id: detect-secrets
args: ['--baseline', '.secrets.baseline']
Expand Down
5 changes: 3 additions & 2 deletions .tflint.hcl
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
plugin "terraform" {
enabled = true
version = "0.1.1"
version = "0.4.0"
source = "github.com/terraform-linters/tflint-ruleset-terraform"
preset = "recommended"
}

plugin "aws" {
enabled = true
version = "0.17.0"
version = "0.26.0"
source = "github.com/terraform-linters/tflint-ruleset-aws"
}
6 changes: 6 additions & 0 deletions .tool-versions
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
terraform 1.1.7
terraform-docs 0.16.0
tflint 0.48.0
checkov 2.4.25
awscli 2.13.15
pre-commit 3.4.0
93 changes: 71 additions & 22 deletions README.md

Large diffs are not rendered by default.

62 changes: 62 additions & 0 deletions argo-crds.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
locals {
  # CRDs get their own plain ArgoCD Application manifest only when the module is
  # enabled, Argo is in use, the Application is NOT installed via Helm, and CRD
  # installation has not been explicitly skipped.
  crds_argo_application_enabled = var.enabled && var.argo_enabled && !var.argo_helm_enabled && !var.helm_skip_crds

  # Metadata merged into the CRDs Application object. try() tolerates a
  # partially-specified var.crds_argo_metadata (missing keys fall back to empty).
  crds_argo_application_metadata = {
    "labels" : try(var.crds_argo_metadata.labels, {}),
    "annotations" : try(var.crds_argo_metadata.annotations, {}),
    "finalizers" : try(var.crds_argo_metadata.finalizers, [])
  }

  # Body of the ArgoCD Application `spec` for the CRDs chart.
  crds_argo_application_values = {
    "project" : var.argo_project
    "source" : {
      # OCI registries take the repo URL verbatim; classic Helm repos get an
      # https:// scheme prefixed.
      "repoURL" : var.helm_repo_oci ? local.helm_repo_url : "https://${local.helm_repo_url}"
      "chart" : var.crds_helm_chart_name
      "targetRevision" : var.crds_helm_chart_version
      "helm" : {
        "releaseName" : var.crds_helm_release_name
        # ArgoCD expects either a non-empty parameter list or no key at all;
        # null drops the key when var.crds_settings is empty.
        "parameters" : length(var.crds_settings) == 0 ? null : [for k, v in var.crds_settings : tomap({ "forceString" : true, "name" : k, "value" : v })]
        # crds_values is declared elsewhere in this module — presumably merging
        # default and user-supplied YAML values; TODO confirm against values.tf.
        "values" : var.enabled ? data.utils_deep_merge_yaml.crds_values[0].output : ""
      }
    }
    "destination" : {
      "server" : var.argo_destination_server
      "namespace" : var.namespace
    }
    "syncPolicy" : var.crds_argo_sync_policy
    "info" : var.argo_info
  }

  # Status fields the kubernetes_manifest wait block (and the Helm-mode wait
  # Job) polls for; user-supplied entries override these defaults.
  crds_argo_kubernetes_manifest_wait_fields = merge(
    {
      "status.sync.status"          = "Synced"
      "status.health.status"        = "Healthy"
      "status.operationState.phase" = "Succeeded"
    },
    var.crds_argo_kubernetes_manifest_wait_fields
  )
}

# ArgoCD Application that installs the Karpenter CRDs chart directly via the
# Kubernetes provider (the non-Helm Argo installation path).
resource "kubernetes_manifest" "crds" {
  count = local.crds_argo_application_enabled ? 1 : 0
  manifest = {
    "apiVersion" = var.argo_apiversion
    "kind"       = "Application"
    "metadata" = merge(
      local.crds_argo_application_metadata,
      { "name" = var.crds_helm_release_name },
      { "namespace" = var.argo_namespace },
    )
    # var.crds_argo_spec is merged last so callers can override any computed
    # spec field.
    "spec" = merge(
      local.crds_argo_application_values,
      var.crds_argo_spec
    )
  }
  # Fields ArgoCD itself mutates; listing them avoids perpetual plan diffs.
  computed_fields = var.crds_argo_kubernetes_manifest_computed_fields

  field_manager {
    name            = var.crds_argo_kubernetes_manifest_field_manager_name
    force_conflicts = var.crds_argo_kubernetes_manifest_field_manager_force_conflicts
  }

  # Block apply until the Application reports the expected status fields
  # (Synced/Healthy/Succeeded by default).
  wait {
    fields = local.crds_argo_kubernetes_manifest_wait_fields
  }
}
162 changes: 162 additions & 0 deletions argo-helm-crds.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,162 @@
locals {
  # Helm-managed ArgoCD Application path for the CRDs chart: module enabled,
  # Argo in use, Application installed via Helm, CRDs not skipped.
  crds_helm_argo_application_enabled = var.enabled && var.argo_enabled && var.argo_helm_enabled && !var.helm_skip_crds
  # The wait Job is only created when there is at least one status field to
  # wait on.
  crds_helm_argo_application_wait_enabled = local.crds_helm_argo_application_enabled && length(keys(local.crds_argo_kubernetes_manifest_wait_fields)) > 0
  # Values passed to the local argocd-application chart: deep-merged computed
  # values first, then raw user-supplied YAML so it takes precedence.
  crds_helm_argo_application_values = [
    one(data.utils_deep_merge_yaml.crds_argo_helm_values[*].output),
    var.crds_argo_helm_values
  ]
}

# Deep-merges the YAML documents that make up the CRDs ArgoCD Application:
# apiVersion, the computed spec, user spec overrides, and object metadata.
# Later inputs win on conflicting keys, so var.crds_argo_spec overrides the
# computed spec.
data "utils_deep_merge_yaml" "crds_argo_helm_values" {
  count = local.crds_helm_argo_application_enabled ? 1 : 0
  input = compact([
    yamlencode({
      "apiVersion" : var.argo_apiversion
    }),
    yamlencode({
      "spec" : local.crds_argo_application_values
    }),
    yamlencode({
      "spec" : var.crds_argo_spec
    }),
    yamlencode(
      local.crds_argo_application_metadata
    )
  ])
}

# Renders the module-local argocd-application wrapper chart, producing the
# ArgoCD Application for the CRDs chart via Helm (the Helm-managed Argo path).
resource "helm_release" "crds_argo_application" {
  count = local.crds_helm_argo_application_enabled ? 1 : 0

  # Local chart shipped with the module, not a remote repository.
  chart     = "${path.module}/helm/argocd-application"
  name      = var.crds_helm_release_name
  namespace = var.argo_namespace

  values = local.crds_helm_argo_application_values
}

# Namespaced Role granting read-only access to ArgoCD Application objects, so
# the wait Job below can poll the CRDs Application status with `kubectl wait`.
resource "kubernetes_role" "crds_helm_argo_application_wait" {
  count = local.crds_helm_argo_application_wait_enabled ? 1 : 0

  metadata {
    name        = "${var.crds_helm_release_name}-argo-application-wait"
    namespace   = var.argo_namespace
    labels      = local.crds_argo_application_metadata.labels
    annotations = local.crds_argo_application_metadata.annotations
  }

  rule {
    api_groups = ["argoproj.io"]
    resources  = ["applications"]
    # Read-only verbs: the Job only watches status, it never mutates.
    verbs = ["get", "list", "watch"]
  }
}

# Binds the wait Role to the wait ServiceAccount used by the Job's pods.
resource "kubernetes_role_binding" "crds_helm_argo_application_wait" {
  count = local.crds_helm_argo_application_wait_enabled ? 1 : 0

  metadata {
    name        = "${var.crds_helm_release_name}-argo-application-wait"
    namespace   = var.argo_namespace
    labels      = local.crds_argo_application_metadata.labels
    annotations = local.crds_argo_application_metadata.annotations
  }

  role_ref {
    api_group = "rbac.authorization.k8s.io"
    kind      = "Role"
    # one() collapses the count-indexed resource; null when count is 0, but
    # this resource shares the same count condition so both exist together.
    name = one(kubernetes_role.crds_helm_argo_application_wait[*].metadata[0].name)
  }

  subject {
    kind      = "ServiceAccount"
    name      = one(kubernetes_service_account.crds_helm_argo_application_wait[*].metadata[0].name)
    namespace = one(kubernetes_service_account.crds_helm_argo_application_wait[*].metadata[0].namespace)
  }
}

# ServiceAccount the wait Job runs as; paired with the Role/RoleBinding above
# it can only read Application objects in the Argo namespace.
resource "kubernetes_service_account" "crds_helm_argo_application_wait" {
  count = local.crds_helm_argo_application_wait_enabled ? 1 : 0

  metadata {
    name        = "${var.crds_helm_release_name}-argo-application-wait"
    namespace   = var.argo_namespace
    labels      = local.crds_argo_application_metadata.labels
    annotations = local.crds_argo_application_metadata.annotations
  }
}

# One-shot Job that blocks the Terraform apply until the CRDs ArgoCD
# Application reports every expected status field. One container is created
# per wait field, each running a single `kubectl wait` against the
# Application object.
resource "kubernetes_job" "crds_helm_argo_application_wait" {
  count = local.crds_helm_argo_application_wait_enabled ? 1 : 0

  metadata {
    # generate_name lets repeated applies create fresh Job objects instead of
    # colliding with a completed immutable Job of the same name.
    generate_name = "${var.crds_helm_release_name}-argo-application-wait-"
    namespace     = var.argo_namespace
    labels        = local.crds_argo_application_metadata.labels
    annotations   = local.crds_argo_application_metadata.annotations
  }

  spec {
    template {
      metadata {
        name        = "${var.crds_helm_release_name}-argo-application-wait"
        labels      = local.crds_argo_application_metadata.labels
        annotations = local.crds_argo_application_metadata.annotations
      }

      spec {
        # Read-only ServiceAccount created above.
        service_account_name = one(kubernetes_service_account.crds_helm_argo_application_wait[*].metadata[0].name)

        # One container per wait field, e.g. status.sync.status=Synced.
        dynamic "container" {
          for_each = local.crds_argo_kubernetes_manifest_wait_fields

          content {
            name    = "${lower(replace(container.key, ".", "-"))}-${md5(jsonencode(local.crds_helm_argo_application_values))}" # md5 suffix is a workaround for https://github.com/hashicorp/terraform-provider-kubernetes/issues/1325
            image   = "bitnami/kubectl:latest" # NOTE(review): unpinned :latest tag is not reproducible — consider pinning a version/digest
            command = ["/bin/bash", "-ecx"]
            # Waits for one ArgoCD Application status field, see https://kubernetes.io/docs/reference/generated/kubectl/kubectl-commands#wait
            # i.e. kubectl wait --for=jsonpath='{.status.sync.status}'=Synced application.argoproj.io <$addon-name>
            args = [
              <<-EOT
              kubectl wait \
                --namespace ${var.argo_namespace} \
                --for=jsonpath='{.${container.key}}'=${container.value} \
                --timeout=${var.argo_helm_wait_timeout} \
                application.argoproj.io ${var.crds_helm_release_name}
              EOT
            ]
          }
        }

        node_selector = var.argo_helm_wait_node_selector

        # Optional scheduling constraints; try() tolerates partially-specified
        # toleration objects.
        dynamic "toleration" {
          for_each = var.argo_helm_wait_tolerations

          content {
            key      = try(toleration.value.key, null)
            operator = try(toleration.value.operator, null)
            value    = try(toleration.value.value, null)
            effect   = try(toleration.value.effect, null)
          }
        }

        # ArgoCD Application status fields might not be available immediately after creation
        restart_policy = "OnFailure"
      }
    }

    backoff_limit = var.argo_helm_wait_backoff_limit
  }

  # Makes terraform apply block on the Job finishing (or timing out).
  wait_for_completion = true

  timeouts {
    create = var.argo_helm_wait_timeout
    update = var.argo_helm_wait_timeout
  }

  # The Application must exist before anything can wait on its status.
  depends_on = [
    helm_release.crds_argo_application
  ]
}
25 changes: 21 additions & 4 deletions argo-helm.tf
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,10 @@ resource "helm_release" "argo_application" {
namespace = var.argo_namespace

values = local.helm_argo_application_values

depends_on = [
kubernetes_job.crds_helm_argo_application_wait
]
}

resource "kubernetes_role" "helm_argo_application_wait" {
Expand Down Expand Up @@ -80,10 +84,10 @@ resource "kubernetes_service_account" "helm_argo_application_wait" {
count = local.helm_argo_application_wait_enabled ? 1 : 0

metadata {
name = "${var.helm_release_name}-argo-application-wait"
namespace = var.argo_namespace
labels = local.argo_application_metadata.labels
annotations = local.argo_application_metadata.annotations
generate_name = "${var.helm_release_name}-argo-application-wait-"
namespace = var.argo_namespace
labels = local.argo_application_metadata.labels
annotations = local.argo_application_metadata.annotations
}
}

Expand Down Expand Up @@ -129,6 +133,19 @@ resource "kubernetes_job" "helm_argo_application_wait" {
}
}

node_selector = var.argo_helm_wait_node_selector

dynamic "toleration" {
for_each = var.argo_helm_wait_tolerations

content {
key = try(toleration.value.key, null)
operator = try(toleration.value.operator, null)
value = try(toleration.value.value, null)
effect = try(toleration.value.effect, null)
}
}

# ArgoCD Application status fields might not be available immediately after creation
restart_policy = "OnFailure"
}
Expand Down
9 changes: 7 additions & 2 deletions argo.tf
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,9 @@ locals {
"targetRevision" : var.helm_chart_version
"helm" : {
"releaseName" : var.helm_release_name
"parameters" : [for k, v in var.settings : tomap({ "forceString" : true, "name" : k, "value" : v })]
"parameters" : length(var.settings) == 0 ? null : [for k, v in var.settings : tomap({ "forceString" : true, "name" : k, "value" : v })]
"values" : var.enabled ? data.utils_deep_merge_yaml.values[0].output : ""
"skipCrds" : true # CRDs are installed in a separate ArgoCD Application
}
}
"destination" : {
Expand All @@ -25,7 +26,7 @@ locals {
}
}

resource "kubernetes_manifest" "this" {
resource "kubernetes_manifest" "controller" {
count = var.enabled && var.argo_enabled && !var.argo_helm_enabled ? 1 : 0
manifest = {
"apiVersion" = var.argo_apiversion
Expand All @@ -50,4 +51,8 @@ resource "kubernetes_manifest" "this" {
wait {
fields = var.argo_kubernetes_manifest_wait_fields
}

depends_on = [
kubernetes_manifest.crds
]
}
10 changes: 5 additions & 5 deletions examples/basic/main.tf
Original file line number Diff line number Diff line change
@@ -1,8 +1,3 @@
resource "aws_iam_role" "this" {
name = "karpenter-node-role"
assume_role_policy = data.aws_iam_policy_document.karpenter_node_assume_policy.json
}

data "aws_iam_policy_document" "karpenter_node_assume_policy" {
statement {
actions = ["sts:AssumeRole"]
Expand All @@ -15,6 +10,11 @@ data "aws_iam_policy_document" "karpenter_node_assume_policy" {
}
}

resource "aws_iam_role" "this" {
name = "karpenter-node-role"
assume_role_policy = data.aws_iam_policy_document.karpenter_node_assume_policy.json
}

module "addon_installation_disabled" {
source = "../../"

Expand Down
Loading

0 comments on commit 49b9506

Please sign in to comment.