diff --git a/apps/dbgate/presentation.tf b/apps/dbgate/presentation.tf index fac0270..84caa6b 100644 --- a/apps/dbgate/presentation.tf +++ b/apps/dbgate/presentation.tf @@ -57,7 +57,7 @@ module "oauth2" { namespace = var.namespace domain = var.domain labels = local.common_labels - dns_name = local.dns_name + dns_name = "${local.dns_name}/" redirect_path = "" providers = { kubernetes = kubernetes diff --git a/share/authentik/apps_v1_Deployment_authentik-server.yaml b/share/authentik/apps_v1_Deployment_authentik-server.yaml deleted file mode 100644 index 1e1bcb3..0000000 --- a/share/authentik/apps_v1_Deployment_authentik-server.yaml +++ /dev/null @@ -1,73 +0,0 @@ -# Source: authentik/templates/server-deployment.yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: authentik-server - labels: - helm.sh/chart: authentik-2023.10.7 - app.kubernetes.io/name: authentik - app.kubernetes.io/instance: authentik - app.kubernetes.io/version: "2023.10.7" - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: "server" -spec: - strategy: - {} - selector: - matchLabels: - app.kubernetes.io/name: authentik - app.kubernetes.io/instance: authentik - app.kubernetes.io/component: "server" - template: - metadata: - labels: - app.kubernetes.io/name: authentik - app.kubernetes.io/instance: authentik - app.kubernetes.io/component: "server" - app.kubernetes.io/version: "2023.10.7" - annotations: - goauthentik.io/config-checksum: 92e0692364c90a8b3c4cb0bf6b95463d3dd5910bb3b3a830679ef1e94856a1c1 - spec: - enableServiceLinks: true - securityContext: - {} - containers: - - name: authentik - image: "ghcr.io/goauthentik/server:2023.10.7" - imagePullPolicy: "IfNotPresent" - args: ["server"] - env: - envFrom: - - secretRef: - name: authentik - volumeMounts: - ports: - - name: http - containerPort: 9000 - protocol: TCP - - name: http-metrics - containerPort: 9300 - protocol: TCP - - name: https - containerPort: 9443 - protocol: TCP - livenessProbe: - httpGet: - path: 
/-/health/live/ - port: http - initialDelaySeconds: 5 - periodSeconds: 10 - startupProbe: - failureThreshold: 60 - httpGet: - path: /-/health/live/ - port: http - periodSeconds: 5 - readinessProbe: - httpGet: - path: /-/health/ready/ - port: http - periodSeconds: 10 - securityContext: - {} - volumes: \ No newline at end of file diff --git a/share/authentik/apps_v1_Deployment_authentik-worker.yaml b/share/authentik/apps_v1_Deployment_authentik-worker.yaml deleted file mode 100644 index 5a14958..0000000 --- a/share/authentik/apps_v1_Deployment_authentik-worker.yaml +++ /dev/null @@ -1,47 +0,0 @@ -# Source: authentik/templates/worker-deployment.yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: authentik-worker - labels: - helm.sh/chart: authentik-2023.10.7 - app.kubernetes.io/name: authentik - app.kubernetes.io/instance: authentik - app.kubernetes.io/version: "2023.10.7" - app.kubernetes.io/managed-by: Helm - app.kubernetes.io/component: "worker" -spec: - strategy: - {} - selector: - matchLabels: - app.kubernetes.io/name: authentik - app.kubernetes.io/instance: authentik - app.kubernetes.io/component: "worker" - template: - metadata: - labels: - app.kubernetes.io/name: authentik - app.kubernetes.io/instance: authentik - app.kubernetes.io/component: "worker" - app.kubernetes.io/version: "2023.10.7" - annotations: - goauthentik.io/config-checksum: 92e0692364c90a8b3c4cb0bf6b95463d3dd5910bb3b3a830679ef1e94856a1c1 - spec: - serviceAccountName: authentik - enableServiceLinks: true - securityContext: - {} - containers: - - name: authentik - image: "ghcr.io/goauthentik/server:2023.10.7" - imagePullPolicy: "IfNotPresent" - args: ["worker"] - env: - envFrom: - - secretRef: - name: authentik - volumeMounts: - securityContext: - {} - volumes: \ No newline at end of file diff --git a/share/authentik/secret.tf b/share/authentik/authentik_Secret.tf similarity index 69% rename from share/authentik/secret.tf rename to share/authentik/authentik_Secret.tf index 
4bda326..193a607 100644 --- a/share/authentik/secret.tf +++ b/share/authentik/authentik_Secret.tf @@ -29,6 +29,36 @@ resource "kubectl_manifest" "authentik_secret" { length: "32" EOF } + +resource "kubectl_manifest" "cm" { + yaml_body = <<-EOF + apiVersion: v1 + kind: ConfigMap + metadata: + name: "${var.instance}-${var.component}" + namespace: "${var.namespace}" + labels: ${jsonencode(local.common_labels)} + data: + AUTHENTIK_EMAIL__PORT: "${var.email.port}" + AUTHENTIK_EMAIL__TIMEOUT: "${var.email.timeout}" + AUTHENTIK_EMAIL__USE_TLS: "${var.email.use_tls}" + AUTHENTIK_EMAIL__USE_SSL: "${var.email.use_ssl}" + AUTHENTIK_ERROR_REPORTING__ENABLED: "${var.error_reporting.enabled}" + AUTHENTIK_ERROR_REPORTING__ENVIRONMENT: "${var.error_reporting.environment}" + AUTHENTIK_ERROR_REPORTING__SEND_PII: "${var.error_reporting.send_pii}" + AUTHENTIK_GEOIP: "${var.geoip}" + AUTHENTIK_LOG_LEVEL: "${var.loglevel}" + AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE: "${var.images.app.registry}/${var.images.app.project}/%(type)s:%(version)s" + AUTHENTIK_POSTGRESQL__HOST: ${var.instance}-${var.component}-pool.${var.namespace}.svc + AUTHENTIK_POSTGRESQL__NAME: "${var.component}" + AUTHENTIK_POSTGRESQL__PORT: "5432" + AUTHENTIK_POSTGRESQL__USER: "${var.component}" + AUTHENTIK_REDIS__HOST: "${var.name}-${var.component}-redis" + AUTHENTIK_BOOTSTRAP_EMAIL: "${var.admin.email}@${var.domain_name}" + GUNICORN_CMD_ARGS: "--timeout=90" +EOF +} + resource "kubectl_manifest" "pre_backup_sa" { count = var.backups.enable?1:0 ignore_fields = ["metadata.annotations"] diff --git a/share/authentik/authentik_Service.tf b/share/authentik/authentik_Service.tf new file mode 100644 index 0000000..927ad22 --- /dev/null +++ b/share/authentik/authentik_Service.tf @@ -0,0 +1,42 @@ +resource "kubectl_manifest" "Service_metrics" { + yaml_body = <<-EOF + apiVersion: v1 + kind: Service + metadata: + name: "${var.instance}-${var.component}-metrics" + namespace: ${var.namespace} + labels: 
${jsonencode(local.metrics_all_labels)} + spec: + type: ClusterIP + ports: + - name: metrics + protocol: TCP + port: 9300 + targetPort: metrics + selector: ${jsonencode(local.server_labels)} +EOF +} + +resource "kubectl_manifest" "Service_server" { + yaml_body = <<-EOF + apiVersion: v1 + kind: Service + metadata: + name: "${var.instance}-${var.component}" + namespace: ${var.namespace} + labels: ${jsonencode(local.server_all_labels)} + spec: + type: ClusterIP + ports: + - name: http + protocol: TCP + port: 80 + targetPort: 9000 + - name: https + protocol: TCP + port: 443 + targetPort: 9443 + selector: ${jsonencode(local.server_labels)} +EOF +} + diff --git a/share/authentik/monitoring.coreos.com_v1_PrometheusRule_authentik.yaml b/share/authentik/authentik_monitoring.tf similarity index 83% rename from share/authentik/monitoring.coreos.com_v1_PrometheusRule_authentik.yaml rename to share/authentik/authentik_monitoring.tf index b2f4220..500f46e 100644 --- a/share/authentik/monitoring.coreos.com_v1_PrometheusRule_authentik.yaml +++ b/share/authentik/authentik_monitoring.tf @@ -1,18 +1,16 @@ -# Source: authentik/templates/prom-rules.yaml -apiVersion: monitoring.coreos.com/v1 -kind: PrometheusRule -metadata: - name: authentik - labels: - helm.sh/chart: authentik-2023.10.7 - app.kubernetes.io/name: authentik - app.kubernetes.io/instance: authentik - app.kubernetes.io/version: "2023.10.7" - app.kubernetes.io/managed-by: Helm -spec: - groups: - - name: authentik Aggregate request counters - rules: +resource "kubectl_manifest" "PrometheusRule_authentik" { + count = var.conditions.have_prometheusrules?1:0 + yaml_body = <<-EOF + apiVersion: monitoring.coreos.com/v1 + kind: PrometheusRule + metadata: + name: authentik + namespace: ${var.namespace} + labels: ${jsonencode(local.metrics_all_labels)} + spec: + groups: + - name: authentik Aggregate request counters + rules: - record: job:django_http_requests_before_middlewares_total:sum_rate30s expr: 
sum(rate(django_http_requests_before_middlewares_total[30s])) by (job) - record: job:django_http_requests_unknown_latency_total:sum_rate30s @@ -51,52 +49,50 @@ spec: expr: sum(rate(django_http_exceptions_total_by_type[30s])) by (job,type) - record: job:django_http_exceptions_total_by_view:sum_rate30s expr: sum(rate(django_http_exceptions_total_by_view[30s])) by (job,view) - - - name: authentik Aggregate latency histograms - rules: + - name: authentik Aggregate latency histograms + rules: - record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s expr: histogram_quantile(0.50, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le)) labels: - quantile: "50" + quantile: '50' - record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s expr: histogram_quantile(0.95, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le)) labels: - quantile: "95" + quantile: '95' - record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s expr: histogram_quantile(0.99, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le)) labels: - quantile: "99" + quantile: '99' - record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s expr: histogram_quantile(0.999, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le)) labels: - quantile: "99.9" + quantile: '99.9' - record: job:django_http_requests_latency_seconds:quantile_rate30s expr: histogram_quantile(0.50, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le)) labels: - quantile: "50" + quantile: '50' - record: job:django_http_requests_latency_seconds:quantile_rate30s expr: histogram_quantile(0.95, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le)) labels: - quantile: "95" + quantile: '95' - record: 
job:django_http_requests_latency_seconds:quantile_rate30s expr: histogram_quantile(0.99, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le)) labels: - quantile: "99" + quantile: '99' - record: job:django_http_requests_latency_seconds:quantile_rate30s expr: histogram_quantile(0.999, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le)) labels: - quantile: "99.9" - - - name: authentik Aggregate model operations - rules: + quantile: '99.9' + - name: authentik Aggregate model operations + rules: - record: job:django_model_inserts_total:sum_rate1m expr: sum(rate(django_model_inserts_total[1m])) by (job, model) - record: job:django_model_updates_total:sum_rate1m expr: sum(rate(django_model_updates_total[1m])) by (job, model) - record: job:django_model_deletes_total:sum_rate1m expr: sum(rate(django_model_deletes_total[1m])) by (job, model) - - name: authentik Aggregate database operations - rules: + - name: authentik Aggregate database operations + rules: - record: job:django_db_new_connections_total:sum_rate30s expr: sum(rate(django_db_new_connections_total[30s])) by (alias, vendor) - record: job:django_db_new_connection_errors_total:sum_rate30s @@ -107,56 +103,69 @@ spec: expr: sum(rate(django_db_execute_many_total[30s])) by (alias, vendor) - record: job:django_db_errors_total:sum_rate30s expr: sum(rate(django_db_errors_total[30s])) by (alias, vendor, type) - - - name: authentik Aggregate migrations - rules: + - name: authentik Aggregate migrations + rules: - record: job:django_migrations_applied_total:max expr: max(django_migrations_applied_total) by (job, connection) - record: job:django_migrations_unapplied_total:max expr: max(django_migrations_unapplied_total) by (job, connection) - - - name: authentik Alerts - rules: + - name: authentik Alerts + rules: - alert: NoWorkersConnected labels: severity: critical expr: max without (pid) (authentik_admin_workers) < 1 for: 10m annotations: - summary: No workers connected message: 
authentik instance {{ $labels.instance }}'s worker are either not running or not connected. - - - - alert: PendingMigrations labels: severity: critical expr: max without (pid) (django_migrations_unapplied_total) > 0 for: 10m annotations: - summary: Pending database migrations message: authentik instance {{ $labels.instance }} has pending database migrations - - - alert: FailedSystemTasks labels: severity: critical expr: sum(increase(authentik_system_tasks{status="error"}[2h])) > 0 for: 2h annotations: - summary: Failed system tasks message: System task {{ $labels.task_name }} has failed - - - alert: DisconnectedOutposts labels: severity: critical expr: sum by (outpost) (max without (pid) (authentik_outposts_connected{uid!~"specific.*"})) < 1 for: 30m annotations: - summary: Disconnected outpost - message: Outpost {{ $labels.outpost }} has at least 1 disconnected instance \ No newline at end of file + message: Outpost {{ $labels.outpost }} has at least 1 disconnected instance +EOF +} + +resource "kubectl_manifest" "ServiceMonitor_authentik-server" { + count = var.conditions.have_servicemonitors?1:0 + yaml_body = <<-EOF + apiVersion: monitoring.coreos.com/v1 + kind: ServiceMonitor + metadata: + name: authentik-server + namespace: ${var.namespace} + labels: ${jsonencode(local.metrics_all_labels)} + spec: + endpoints: + - port: metrics + interval: 30s + scrapeTimeout: 3s + path: /metrics + namespaceSelector: + matchNames: + - ${var.namespace} + selector: + matchLabels: ${jsonencode(local.metrics_labels)} +EOF +} + diff --git a/share/authentik/authentik_rbac.tf b/share/authentik/authentik_rbac.tf new file mode 100644 index 0000000..52b664b --- /dev/null +++ b/share/authentik/authentik_rbac.tf @@ -0,0 +1,138 @@ +resource "kubectl_manifest" "sa" { + yaml_body = <<-EOF + apiVersion: v1 + kind: ServiceAccount + metadata: + name: authentik + namespace: ${var.namespace} + labels: ${jsonencode(local.worker_all_labels)} +EOF +} + +resource "kubectl_manifest" "cr" { + yaml_body 
= <<-EOF + apiVersion: rbac.authorization.k8s.io/v1 + kind: ClusterRole + metadata: + name: "${var.namespace}-${var.instance}-${var.component}" + labels: ${jsonencode(local.worker_all_labels)} + rules: + - apiGroups: + - apiextensions.k8s.io + resources: + - customresourcedefinitions + verbs: + - list +EOF +} + +resource "kubectl_manifest" "crb" { + yaml_body = <<-EOF + apiVersion: rbac.authorization.k8s.io/v1 + kind: ClusterRoleBinding + metadata: + name: ${kubectl_manifest.cr.name} + labels: ${jsonencode(local.worker_all_labels)} + roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: ${kubectl_manifest.cr.name} + subjects: + - kind: ServiceAccount + name: ${kubectl_manifest.sa.name} + namespace: ${var.namespace} +EOF +} + +resource "kubectl_manifest" "role" { + yaml_body = <<-EOF + apiVersion: rbac.authorization.k8s.io/v1 + kind: Role + metadata: + name: "${var.instance}-${var.component}" + namespace: ${var.namespace} + labels: ${jsonencode(local.worker_all_labels)} + rules: + - apiGroups: + - '' + resources: + - secrets + - services + - configmaps + verbs: + - get + - create + - delete + - list + - patch + - apiGroups: + - extensions + - apps + resources: + - deployments + verbs: + - get + - create + - delete + - list + - patch + - apiGroups: + - extensions + - networking.k8s.io + resources: + - ingresses + verbs: + - get + - create + - delete + - list + - patch + - apiGroups: + - traefik.containo.us + - traefik.io + resources: + - middlewares + verbs: + - get + - create + - delete + - list + - patch + - apiGroups: + - monitoring.coreos.com + resources: + - servicemonitors + verbs: + - get + - create + - delete + - list + - patch + - apiGroups: + - apiextensions.k8s.io + resources: + - customresourcedefinitions + verbs: + - list +EOF +} + +resource "kubectl_manifest" "rb" { + yaml_body = <<-EOF + apiVersion: rbac.authorization.k8s.io/v1 + kind: RoleBinding + metadata: + name: ${kubectl_manifest.role.name} + namespace: ${var.namespace} + 
labels: ${jsonencode(local.worker_all_labels)} + roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: ${kubectl_manifest.role.name} + subjects: + - kind: ServiceAccount + name: ${kubectl_manifest.sa.name} + namespace: ${var.namespace} +EOF +} + diff --git a/share/authentik/authentik_workers.tf b/share/authentik/authentik_workers.tf new file mode 100644 index 0000000..7778d46 --- /dev/null +++ b/share/authentik/authentik_workers.tf @@ -0,0 +1,115 @@ +resource "kubectl_manifest" "Deployment_worker" { + yaml_body = <<-EOF + apiVersion: apps/v1 + kind: Deployment + metadata: + name: "${var.instance}-${var.component}-worker" + namespace: ${var.namespace} + labels: ${jsonencode(local.worker_all_labels)} + annotations: + configmap.reloader.stakater.com/reload: "${kubectl_manifest.cm.name}" + secret.reloader.stakater.com/reload: "${kubectl_manifest.authentik_secret.name}" + spec: + revisionHistoryLimit: 3 + selector: + matchLabels: ${jsonencode(local.worker_labels)} + template: + metadata: + labels: ${jsonencode(local.worker_labels)} + annotations: + spec: + serviceAccountName: ${kubectl_manifest.sa.name} + terminationGracePeriodSeconds: 30 + affinity: + podAntiAffinity: + preferredDuringSchedulingIgnoredDuringExecution: + - weight: 100 + podAffinityTerm: + labelSelector: + matchLabels: ${jsonencode(local.worker_labels)} + topologyKey: kubernetes.io/hostname + enableServiceLinks: true + containers: + - name: worker + image: "${var.images.app.registry}/${var.images.app.repository}:${var.images.app.tag}" + imagePullPolicy: ${var.images.app.pull_policy} + args: ["worker"] + env: + - name: AUTHENTIK_POSTGRESQL__PASSWORD + valueFrom: + secretKeyRef: + name: ${var.instance}-${var.component}-pg-app + key: password + envFrom: + - configMapRef: + name: "${kubectl_manifest.cm.name}" + - secretRef: + name: "${kubectl_manifest.authentik_secret.name}" + livenessProbe: + exec: + command: ["ak", "healthcheck"] + failureThreshold: 3 + initialDelaySeconds: 5 + periodSeconds: 
10 + successThreshold: 1 + timeoutSeconds: 1 + readinessProbe: + exec: + command: ["ak", "healthcheck"] + failureThreshold: 3 + initialDelaySeconds: 5 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + startupProbe: + exec: + command: ["ak", "healthcheck"] + failureThreshold: 60 + initialDelaySeconds: 30 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} +EOF +} + +resource "kubectl_manifest" "PodDisruptionBudget_authentik-worker" { + yaml_body = <<-EOF + apiVersion: policy/v1 + kind: PodDisruptionBudget + metadata: + name: ${kubectl_manifest.Deployment_worker.name} + namespace: ${var.namespace} + labels: ${jsonencode(local.worker_all_labels)} + spec: + minAvailable: 0 + selector: + matchLabels: ${jsonencode(local.worker_labels)} +EOF +} +resource "kubectl_manifest" "HorizontalPodAutoscaler_authentik-worker" { + yaml_body = <<-EOF + apiVersion: autoscaling/v2 + kind: HorizontalPodAutoscaler + metadata: + name: ${kubectl_manifest.Deployment_worker.name} + namespace: ${var.namespace} + labels: ${jsonencode(local.worker_all_labels)} + spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: ${kubectl_manifest.Deployment_worker.name} + minReplicas: 1 + maxReplicas: 5 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 50 +EOF +} + + diff --git a/share/authentik/authentik_workload.tf b/share/authentik/authentik_workload.tf new file mode 100644 index 0000000..ac9b872 --- /dev/null +++ b/share/authentik/authentik_workload.tf @@ -0,0 +1,187 @@ +resource "kubectl_manifest" "Deployment_server" { + yaml_body = join("", concat([<