This commit is contained in:
2024-05-23 13:21:16 +02:00
parent 73b9353ada
commit 94515871fa
23 changed files with 618 additions and 504 deletions

View File

@@ -57,7 +57,7 @@ module "oauth2" {
namespace = var.namespace
domain = var.domain
labels = local.common_labels
dns_name = local.dns_name
dns_name = "${local.dns_name}/"
redirect_path = ""
providers = {
kubernetes = kubernetes

View File

@@ -1,73 +0,0 @@
# Source: authentik/templates/server-deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: authentik-server
labels:
helm.sh/chart: authentik-2023.10.7
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.10.7"
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/component: "server"
spec:
strategy:
{}
selector:
matchLabels:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/component: "server"
template:
metadata:
labels:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/component: "server"
app.kubernetes.io/version: "2023.10.7"
annotations:
goauthentik.io/config-checksum: 92e0692364c90a8b3c4cb0bf6b95463d3dd5910bb3b3a830679ef1e94856a1c1
spec:
enableServiceLinks: true
securityContext:
{}
containers:
- name: authentik
image: "ghcr.io/goauthentik/server:2023.10.7"
imagePullPolicy: "IfNotPresent"
args: ["server"]
env:
envFrom:
- secretRef:
name: authentik
volumeMounts:
ports:
- name: http
containerPort: 9000
protocol: TCP
- name: http-metrics
containerPort: 9300
protocol: TCP
- name: https
containerPort: 9443
protocol: TCP
livenessProbe:
httpGet:
path: /-/health/live/
port: http
initialDelaySeconds: 5
periodSeconds: 10
startupProbe:
failureThreshold: 60
httpGet:
path: /-/health/live/
port: http
periodSeconds: 5
readinessProbe:
httpGet:
path: /-/health/ready/
port: http
periodSeconds: 10
securityContext:
{}
volumes:

View File

@@ -1,47 +0,0 @@
# Source: authentik/templates/worker-deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: authentik-worker
labels:
helm.sh/chart: authentik-2023.10.7
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.10.7"
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/component: "worker"
spec:
strategy:
{}
selector:
matchLabels:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/component: "worker"
template:
metadata:
labels:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/component: "worker"
app.kubernetes.io/version: "2023.10.7"
annotations:
goauthentik.io/config-checksum: 92e0692364c90a8b3c4cb0bf6b95463d3dd5910bb3b3a830679ef1e94856a1c1
spec:
serviceAccountName: authentik
enableServiceLinks: true
securityContext:
{}
containers:
- name: authentik
image: "ghcr.io/goauthentik/server:2023.10.7"
imagePullPolicy: "IfNotPresent"
args: ["worker"]
env:
envFrom:
- secretRef:
name: authentik
volumeMounts:
securityContext:
{}
volumes:

View File

@@ -29,6 +29,36 @@ resource "kubectl_manifest" "authentik_secret" {
length: "32"
EOF
}
# Non-secret authentik configuration, consumed via envFrom by the server and
# worker Deployments in this module.
# Fix: AUTHENTIK_EMAIL__USE_TLS was missing its ":" separator, which made the
# ConfigMap's data mapping invalid YAML and broke the manifest apply.
# NOTE(review): AUTHENTIK_REDIS__HOST is built from var.name (matching the
# redis resource) while most other names here use var.instance — confirm the
# two variables are meant to differ.
resource "kubectl_manifest" "cm" {
yaml_body = <<-EOF
apiVersion: v1
kind: ConfigMap
metadata:
name: "${var.instance}-${var.component}"
namespace: "${var.namespace}"
labels: ${jsonencode(local.common_labels)}
data:
AUTHENTIK_EMAIL__PORT: "${var.email.port}"
AUTHENTIK_EMAIL__TIMEOUT: "${var.email.timeout}"
AUTHENTIK_EMAIL__USE_TLS: "${var.email.use_tls}"
AUTHENTIK_EMAIL__USE_SSL: "${var.email.use_ssl}"
AUTHENTIK_ERROR_REPORTING__ENABLED: "${var.error_reporting.enabled}"
AUTHENTIK_ERROR_REPORTING__ENVIRONMENT: "${var.error_reporting.environment}"
AUTHENTIK_ERROR_REPORTING__SEND_PII: "${var.error_reporting.send_pii}"
AUTHENTIK_GEOIP: "${var.geoip}"
AUTHENTIK_LOG_LEVEL: "${var.loglevel}"
AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE: "${var.images.app.registry}/${var.images.app.project}/%(type)s:%(version)s"
AUTHENTIK_POSTGRESQL__HOST: ${var.instance}-${var.component}-pool.${var.namespace}.svc
AUTHENTIK_POSTGRESQL__NAME: "${var.component}"
AUTHENTIK_POSTGRESQL__PORT: "5432"
AUTHENTIK_POSTGRESQL__USER: "${var.component}"
AUTHENTIK_REDIS__HOST: "${var.name}-${var.component}-redis"
AUTHENTIK_BOOTSTRAP_EMAIL: "${var.admin.email}@${var.domain_name}"
GUNICORN_CMD_ARGS: "--timeout=90"
EOF
}
resource "kubectl_manifest" "pre_backup_sa" {
count = var.backups.enable?1:0
ignore_fields = ["metadata.annotations"]

View File

@@ -0,0 +1,42 @@
# ClusterIP Service exposing the authentik server pods' metrics port (9300,
# named "metrics" on the container) for Prometheus scraping; selected by the
# base server labels and carrying the metrics label set so the ServiceMonitor
# can match it.
resource "kubectl_manifest" "Service_metrics" {
yaml_body = <<-EOF
apiVersion: v1
kind: Service
metadata:
name: "${var.instance}-${var.component}-metrics"
namespace: ${var.namespace}
labels: ${jsonencode(local.metrics_all_labels)}
spec:
type: ClusterIP
ports:
- name: metrics
protocol: TCP
port: 9300
targetPort: metrics
selector: ${jsonencode(local.server_labels)}
EOF
}
# Main ClusterIP Service for the authentik server: maps 80 -> container 9000
# (http) and 443 -> container 9443 (https), selecting the server pods by the
# base server labels.
resource "kubectl_manifest" "Service_server" {
yaml_body = <<-EOF
apiVersion: v1
kind: Service
metadata:
name: "${var.instance}-${var.component}"
namespace: ${var.namespace}
labels: ${jsonencode(local.server_all_labels)}
spec:
type: ClusterIP
ports:
- name: http
protocol: TCP
port: 80
targetPort: 9000
- name: https
protocol: TCP
port: 443
targetPort: 9443
selector: ${jsonencode(local.server_labels)}
EOF
}

View File

@@ -1,18 +1,16 @@
# Source: authentik/templates/prom-rules.yaml
apiVersion: monitoring.coreos.com/v1
kind: PrometheusRule
metadata:
name: authentik
labels:
helm.sh/chart: authentik-2023.10.7
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.10.7"
app.kubernetes.io/managed-by: Helm
spec:
groups:
- name: authentik Aggregate request counters
rules:
resource "kubectl_manifest" "PrometheusRule_authentik" {
count = var.conditions.have_prometheusrules?1:0
yaml_body = <<-EOF
apiVersion: monitoring.coreos.com/v1
kind: PrometheusRule
metadata:
name: authentik
namespace: ${var.namespace}
labels: ${jsonencode(local.metrics_all_labels)}
spec:
groups:
- name: authentik Aggregate request counters
rules:
- record: job:django_http_requests_before_middlewares_total:sum_rate30s
expr: sum(rate(django_http_requests_before_middlewares_total[30s])) by (job)
- record: job:django_http_requests_unknown_latency_total:sum_rate30s
@@ -51,52 +49,50 @@ spec:
expr: sum(rate(django_http_exceptions_total_by_type[30s])) by (job,type)
- record: job:django_http_exceptions_total_by_view:sum_rate30s
expr: sum(rate(django_http_exceptions_total_by_view[30s])) by (job,view)
- name: authentik Aggregate latency histograms
rules:
- name: authentik Aggregate latency histograms
rules:
- record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
expr: histogram_quantile(0.50, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
labels:
quantile: "50"
quantile: '50'
- record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
expr: histogram_quantile(0.95, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
labels:
quantile: "95"
quantile: '95'
- record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
expr: histogram_quantile(0.99, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
labels:
quantile: "99"
quantile: '99'
- record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
expr: histogram_quantile(0.999, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
labels:
quantile: "99.9"
quantile: '99.9'
- record: job:django_http_requests_latency_seconds:quantile_rate30s
expr: histogram_quantile(0.50, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
labels:
quantile: "50"
quantile: '50'
- record: job:django_http_requests_latency_seconds:quantile_rate30s
expr: histogram_quantile(0.95, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
labels:
quantile: "95"
quantile: '95'
- record: job:django_http_requests_latency_seconds:quantile_rate30s
expr: histogram_quantile(0.99, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
labels:
quantile: "99"
quantile: '99'
- record: job:django_http_requests_latency_seconds:quantile_rate30s
expr: histogram_quantile(0.999, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
labels:
quantile: "99.9"
- name: authentik Aggregate model operations
rules:
quantile: '99.9'
- name: authentik Aggregate model operations
rules:
- record: job:django_model_inserts_total:sum_rate1m
expr: sum(rate(django_model_inserts_total[1m])) by (job, model)
- record: job:django_model_updates_total:sum_rate1m
expr: sum(rate(django_model_updates_total[1m])) by (job, model)
- record: job:django_model_deletes_total:sum_rate1m
expr: sum(rate(django_model_deletes_total[1m])) by (job, model)
- name: authentik Aggregate database operations
rules:
- name: authentik Aggregate database operations
rules:
- record: job:django_db_new_connections_total:sum_rate30s
expr: sum(rate(django_db_new_connections_total[30s])) by (alias, vendor)
- record: job:django_db_new_connection_errors_total:sum_rate30s
@@ -107,56 +103,69 @@ spec:
expr: sum(rate(django_db_execute_many_total[30s])) by (alias, vendor)
- record: job:django_db_errors_total:sum_rate30s
expr: sum(rate(django_db_errors_total[30s])) by (alias, vendor, type)
- name: authentik Aggregate migrations
rules:
- name: authentik Aggregate migrations
rules:
- record: job:django_migrations_applied_total:max
expr: max(django_migrations_applied_total) by (job, connection)
- record: job:django_migrations_unapplied_total:max
expr: max(django_migrations_unapplied_total) by (job, connection)
- name: authentik Alerts
rules:
- name: authentik Alerts
rules:
- alert: NoWorkersConnected
labels:
severity: critical
expr: max without (pid) (authentik_admin_workers) < 1
for: 10m
annotations:
summary: No workers connected
message: authentik instance {{ $labels.instance }}'s worker are either not running or not connected.
- alert: PendingMigrations
labels:
severity: critical
expr: max without (pid) (django_migrations_unapplied_total) > 0
for: 10m
annotations:
summary: Pending database migrations
message: authentik instance {{ $labels.instance }} has pending database migrations
- alert: FailedSystemTasks
labels:
severity: critical
expr: sum(increase(authentik_system_tasks{status="error"}[2h])) > 0
for: 2h
annotations:
summary: Failed system tasks
message: System task {{ $labels.task_name }} has failed
- alert: DisconnectedOutposts
labels:
severity: critical
expr: sum by (outpost) (max without (pid) (authentik_outposts_connected{uid!~"specific.*"})) < 1
for: 30m
annotations:
summary: Disconnected outpost
message: Outpost {{ $labels.outpost }} has at least 1 disconnected instance
message: Outpost {{ $labels.outpost }} has at least 1 disconnected instance
EOF
}
# ServiceMonitor scraping /metrics every 30s (3s timeout) from Services in
# this namespace that carry local.metrics_labels (i.e. the metrics Service).
# Only created when the cluster has the ServiceMonitor CRD, as reported by
# var.conditions.
resource "kubectl_manifest" "ServiceMonitor_authentik-server" {
count = var.conditions.have_servicemonitors?1:0 # skip when the CRD is absent
yaml_body = <<-EOF
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
name: authentik-server
namespace: ${var.namespace}
labels: ${jsonencode(local.metrics_all_labels)}
spec:
endpoints:
- port: metrics
interval: 30s
scrapeTimeout: 3s
path: /metrics
namespaceSelector:
matchNames:
- ${var.namespace}
selector:
matchLabels: ${jsonencode(local.metrics_labels)}
EOF
}

View File

@@ -0,0 +1,138 @@
# ServiceAccount "authentik" used by the worker Deployment; granted
# permissions through the Role/ClusterRole bindings defined alongside it.
resource "kubectl_manifest" "sa" {
yaml_body = <<-EOF
apiVersion: v1
kind: ServiceAccount
metadata:
name: authentik
namespace: ${var.namespace}
labels: ${jsonencode(local.worker_all_labels)}
EOF
}
# ClusterRole allowing authentik to list CustomResourceDefinitions
# cluster-wide. The name is prefixed with the namespace — presumably to keep
# this cluster-scoped resource unique across multiple installs; confirm.
resource "kubectl_manifest" "cr" {
yaml_body = <<-EOF
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: "${var.namespace}-${var.instance}-${var.component}"
labels: ${jsonencode(local.worker_all_labels)}
rules:
- apiGroups:
- apiextensions.k8s.io
resources:
- customresourcedefinitions
verbs:
- list
EOF
}
# Binds the CRD-listing ClusterRole above to the authentik ServiceAccount.
# Name and roleRef both reuse the ClusterRole resource's name so they cannot
# drift apart.
resource "kubectl_manifest" "crb" {
yaml_body = <<-EOF
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: ${kubectl_manifest.cr.name}
labels: ${jsonencode(local.worker_all_labels)}
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: ${kubectl_manifest.cr.name}
subjects:
- kind: ServiceAccount
name: ${kubectl_manifest.sa.name}
namespace: ${var.namespace}
EOF
}
# Namespace-scoped Role for authentik, granting get/create/delete/list/patch
# on: core secrets/services/configmaps, deployments (extensions+apps),
# ingresses (extensions+networking.k8s.io), traefik middlewares
# (traefik.containo.us and traefik.io), and servicemonitors
# (monitoring.coreos.com); plus list on CRDs.
resource "kubectl_manifest" "role" {
yaml_body = <<-EOF
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
name: "${var.instance}-${var.component}"
namespace: ${var.namespace}
labels: ${jsonencode(local.worker_all_labels)}
rules:
- apiGroups:
- ''
resources:
- secrets
- services
- configmaps
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- extensions
- apps
resources:
- deployments
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- extensions
- networking.k8s.io
resources:
- ingresses
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- traefik.containo.us
- traefik.io
resources:
- middlewares
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- monitoring.coreos.com
resources:
- servicemonitors
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- apiextensions.k8s.io
resources:
- customresourcedefinitions
verbs:
- list
EOF
}
# Binds the namespace-scoped Role above to the authentik ServiceAccount;
# name and roleRef reuse the Role resource's name.
resource "kubectl_manifest" "rb" {
yaml_body = <<-EOF
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: ${kubectl_manifest.role.name}
namespace: ${var.namespace}
labels: ${jsonencode(local.worker_all_labels)}
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: ${kubectl_manifest.role.name}
subjects:
- kind: ServiceAccount
name: ${kubectl_manifest.sa.name}
namespace: ${var.namespace}
EOF
}

View File

@@ -0,0 +1,115 @@
# authentik worker Deployment (container runs with args ["worker"]). The
# stakater reloader annotations restart pods when the referenced ConfigMap or
# Secret change. The PG password is pulled from the CNPG-style
# "<instance>-<component>-pg-app" secret; the rest of the environment comes
# from the shared ConfigMap and Secret via envFrom. All three probes exec
# "ak healthcheck"; a soft pod anti-affinity spreads replicas across nodes.
resource "kubectl_manifest" "Deployment_worker" {
yaml_body = <<-EOF
apiVersion: apps/v1
kind: Deployment
metadata:
name: "${var.instance}-${var.component}-worker"
namespace: ${var.namespace}
labels: ${jsonencode(local.worker_all_labels)}
annotations:
configmap.reloader.stakater.com/reload: "${kubectl_manifest.cm.name}"
secret.reloader.stakater.com/reload: "${kubectl_manifest.authentik_secret.name}"
spec:
revisionHistoryLimit: 3
selector:
matchLabels: ${jsonencode(local.worker_labels)}
template:
metadata:
labels: ${jsonencode(local.worker_labels)}
annotations:
spec:
serviceAccountName: ${kubectl_manifest.sa.name}
terminationGracePeriodSeconds: 30
affinity:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- weight: 100
podAffinityTerm:
labelSelector:
matchLabels: ${jsonencode(local.worker_labels)}
topologyKey: kubernetes.io/hostname
enableServiceLinks: true
containers:
- name: worker
image: "${var.images.app.registry}/${var.images.app.repository}:${var.images.app.tag}"
imagePullPolicy: ${var.images.app.pull_policy}
args: ["worker"]
env:
- name: AUTHENTIK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
name: ${var.instance}-${var.component}-pg-app
key: password
envFrom:
- configMapRef:
name: "${kubectl_manifest.cm.name}"
- secretRef:
name: "${kubectl_manifest.authentik_secret.name}"
livenessProbe:
exec:
command: ["ak", "healthcheck"]
failureThreshold: 3
initialDelaySeconds: 5
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
readinessProbe:
exec:
command: ["ak", "healthcheck"]
failureThreshold: 3
initialDelaySeconds: 5
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
startupProbe:
exec:
command: ["ak", "healthcheck"]
failureThreshold: 60
initialDelaySeconds: 30
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
resources: {}
EOF
}
# PDB for the worker pods. NOTE(review): minAvailable: 0 never blocks
# voluntary evictions, making this effectively a no-op for availability —
# confirm that is intentional (e.g. to never impede node drains).
resource "kubectl_manifest" "PodDisruptionBudget_authentik-worker" {
yaml_body = <<-EOF
apiVersion: policy/v1
kind: PodDisruptionBudget
metadata:
name: ${kubectl_manifest.Deployment_worker.name}
namespace: ${var.namespace}
labels: ${jsonencode(local.worker_all_labels)}
spec:
minAvailable: 0
selector:
matchLabels: ${jsonencode(local.worker_labels)}
EOF
}
# HPA scaling the worker Deployment between 1 and 5 replicas on 50% average
# CPU utilization.
resource "kubectl_manifest" "HorizontalPodAutoscaler_authentik-worker" {
yaml_body = <<-EOF
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
name: ${kubectl_manifest.Deployment_worker.name}
namespace: ${var.namespace}
labels: ${jsonencode(local.worker_all_labels)}
spec:
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: ${kubectl_manifest.Deployment_worker.name}
minReplicas: 1
maxReplicas: 5
metrics:
- type: Resource
resource:
name: cpu
target:
type: Utilization
averageUtilization: 50
EOF
}

View File

@@ -0,0 +1,187 @@
# authentik server Deployment. yaml_body is assembled with join/concat so the
# optional customisation volumes/volumeMounts can be appended to the base
# manifest. When var.customisation.configmap_name is set, the branches add:
#   1) both custom.css and the left-brand icon (both toggles true),
#   2) only the icon (use_icon_left && !use_custom_css),
#   3) only the CSS (!use_icon_left && use_custom_css),
# and otherwise nothing ([""]) is appended. The reloader annotation also
# watches the customisation ConfigMap when either toggle is active. Listen
# addresses pin http/https/metrics to 9000/9443/9300, matching the container
# ports and the health/startup probes on the named "http" port.
resource "kubectl_manifest" "Deployment_server" {
yaml_body = join("", concat([<<EOF
apiVersion: apps/v1
kind: Deployment
metadata:
name: "${var.instance}-${var.component}-server"
namespace: ${var.namespace}
labels: ${jsonencode(local.server_all_labels)}
annotations:
configmap.reloader.stakater.com/reload: "${(var.customisation.configmap_name!="" && (var.customisation.use_icon_left || var.customisation.use_custom_css))?"${kubectl_manifest.cm.name},${var.customisation.configmap_name}":"${kubectl_manifest.cm.name}"}"
secret.reloader.stakater.com/reload: "${kubectl_manifest.authentik_secret.name}"
spec:
revisionHistoryLimit: 3
selector:
matchLabels: ${jsonencode(local.server_labels)}
template:
metadata:
labels: ${jsonencode(local.server_labels)}
annotations:
prometheus.io/port: '9300'
prometheus.io/scrape: 'true'
spec:
terminationGracePeriodSeconds: 30
affinity:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- weight: 100
podAffinityTerm:
labelSelector:
matchLabels: ${jsonencode(local.server_labels)}
topologyKey: kubernetes.io/hostname
enableServiceLinks: true
containers:
- name: server
image: "${var.images.app.registry}/${var.images.app.repository}:${var.images.app.tag}"
imagePullPolicy: ${var.images.app.pull_policy}
args:
- server
env:
- name: AUTHENTIK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
name: ${var.instance}-${var.component}-pg-app
key: password
- name: AUTHENTIK_LISTEN__HTTP
value: 0.0.0.0:9000
- name: AUTHENTIK_LISTEN__HTTPS
value: 0.0.0.0:9443
- name: AUTHENTIK_LISTEN__METRICS
value: 0.0.0.0:9300
envFrom:
- configMapRef:
name: "${kubectl_manifest.cm.name}"
- secretRef:
name: "${kubectl_manifest.authentik_secret.name}"
ports:
- name: http
containerPort: 9000
protocol: TCP
- name: https
containerPort: 9443
protocol: TCP
- name: metrics
containerPort: 9300
protocol: TCP
livenessProbe:
failureThreshold: 3
httpGet:
path: /-/health/live/
port: http
initialDelaySeconds: 5
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
readinessProbe:
failureThreshold: 3
httpGet:
path: /-/health/ready/
port: http
initialDelaySeconds: 5
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
startupProbe:
failureThreshold: 60
httpGet:
path: /-/health/live/
port: http
initialDelaySeconds: 5
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
resources: {}
EOF
], var.customisation.configmap_name!="" && var.customisation.use_icon_left && var.customisation.use_custom_css?[<<EOF
volumeMounts:
- name: custom-css
mountPath: /web/dist/custom.css
subPath: custom.css
- name: custom-left
mountPath: /web/dist/assets/icons/icon_left_brand.svg
subPath: icon_left_brand.svg
volumes:
- name: custom-css
configMap:
name: "${var.customisation.configmap_name}"
items:
- key: custom.css
path: custom.css
- name: custom-left
configMap:
name: "${var.customisation.configmap_name}"
items:
- key: icon_left_brand.svg
path: icon_left_brand.svg
EOF
]
:var.customisation.configmap_name!="" && var.customisation.use_icon_left && !var.customisation.use_custom_css?[<<EOF
volumeMounts:
- name: custom-left
mountPath: /web/dist/assets/icons/icon_left_brand.svg
subPath: icon_left_brand.svg
volumes:
- name: custom-left
configMap:
name: "${var.customisation.configmap_name}"
items:
- key: icon_left_brand.svg
path: icon_left_brand.svg
EOF
]
:var.customisation.configmap_name!="" && !var.customisation.use_icon_left && var.customisation.use_custom_css?[<<EOF
volumeMounts:
- name: custom-css
mountPath: /web/dist/custom.css
subPath: custom.css
volumes:
- name: custom-css
configMap:
name: "${var.customisation.configmap_name}"
items:
- key: custom.css
path: custom.css
EOF
]
:[""] ))
}
# HPA scaling the server Deployment between 1 and 5 replicas on 50% average
# CPU utilization.
resource "kubectl_manifest" "HorizontalPodAutoscaler_authentik-server" {
yaml_body = <<-EOF
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
name: ${kubectl_manifest.Deployment_server.name}
namespace: ${var.namespace}
labels: ${jsonencode(local.server_all_labels)}
spec:
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: ${kubectl_manifest.Deployment_server.name}
minReplicas: 1
maxReplicas: 5
metrics:
- type: Resource
resource:
name: cpu
target:
type: Utilization
averageUtilization: 50
EOF
}
# PDB for the server pods. NOTE(review): minAvailable: 0 never blocks
# voluntary evictions (same as the worker PDB) — confirm intentional.
resource "kubectl_manifest" "PodDisruptionBudget_authentik-server" {
yaml_body = <<-EOF
apiVersion: policy/v1
kind: PodDisruptionBudget
metadata:
name: ${kubectl_manifest.Deployment_server.name}
namespace: ${var.namespace}
labels: ${jsonencode(local.server_all_labels)}
spec:
minAvailable: 0
selector:
matchLabels: ${jsonencode(local.server_labels)}
EOF
}

View File

@@ -1,26 +0,0 @@
# Source: authentik/templates/server-hpa.yaml
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
name: authentik-server
labels:
helm.sh/chart: authentik-2023.10.7
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.10.7"
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/component: "server"
spec:
minReplicas: 1
maxReplicas: 5
metrics:
- resource:
name: cpu
target:
averageUtilization: 50
type: Utilization
type: Resource
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: authentik-server

View File

@@ -1,26 +0,0 @@
# Source: authentik/templates/worker-hpa.yaml
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
name: authentik-worker
labels:
helm.sh/chart: authentik-2023.10.7
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.10.7"
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/component: "worker"
spec:
minReplicas: 1
maxReplicas: 5
metrics:
- resource:
name: cpu
target:
averageUtilization: 80
type: Utilization
type: Resource
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: authentik-worker

View File

@@ -1,18 +1,42 @@
locals {
common_labels = {
core_labels = {
"app.kubernetes.io/name" = var.component
"app.kubernetes.io/instance" = var.instance
}
common_labels = merge({
"vynil.solidite.fr/owner-name" = var.instance
"vynil.solidite.fr/owner-namespace" = var.namespace
"vynil.solidite.fr/owner-category" = var.category
"vynil.solidite.fr/owner-component" = var.component
"app.kubernetes.io/managed-by" = "vynil"
"app.kubernetes.io/name" = var.component
"app.kubernetes.io/instance" = var.instance
}
server-annotations = (var.customisation.configmap_name!="" && (var.customisation.use_icon_left || var.customisation.use_custom_css))?{
},local.core_labels)
server_labels = merge({
"app.kubernetes.io/componant" = "server"
},local.core_labels)
server_all_labels = merge({
"app.kubernetes.io/componant" = "server"
},local.common_labels)
worker_labels = merge({
"app.kubernetes.io/componant" = "worker"
},local.core_labels)
worker_all_labels = merge({
"app.kubernetes.io/componant" = "worker"
},local.common_labels)
redis_all_labels = merge({
"app.kubernetes.io/componant" = "redis"
},local.common_labels)
metrics_labels = merge({
"app.kubernetes.io/component" = "server-metrics"
},local.core_labels)
metrics_all_labels = merge({
"app.kubernetes.io/component" = "server-metrics"
},local.common_labels)
server_annotations = (var.customisation.configmap_name!="" && (var.customisation.use_icon_left || var.customisation.use_custom_css))?{
"configmap.reloader.stakater.com/reload" = var.customisation.configmap_name
}:{}
}
data "kustomization_overlay" "data" {
namespace = var.namespace
common_labels = local.common_labels
@@ -54,7 +78,7 @@ apiVersion: apps/v1
kind: Deployment
metadata:
name: authentik-server
annotations: ${jsonencode(local.server-annotations)}
annotations: ${jsonencode(local.server_annotations)}
spec:
template:
spec:
@@ -63,8 +87,6 @@ spec:
image: "${var.images.app.registry}/${var.images.app.repository}:${var.images.app.tag}"
imagePullPolicy: "${var.images.app.pull_policy}"
env:
- name: "AUTHENTIK_POSTGRESQL__HOST"
value: "${var.instance}-${var.component}-pool.${var.namespace}.svc"
- name: AUTHENTIK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
@@ -129,78 +151,4 @@ EOF
]
:[""] ))
}
patches {
target {
kind = "Deployment"
name = "authentik-worker"
}
patch = <<-EOF
apiVersion: apps/v1
kind: Deployment
metadata:
name: authentik-worker
spec:
template:
spec:
containers:
- name: authentik
image: "${var.images.app.registry}/${var.images.app.repository}:${var.images.app.tag}"
imagePullPolicy: "${var.images.app.pull_policy}"
env:
- name: "AUTHENTIK_POSTGRESQL__HOST"
value: "${var.instance}-${var.component}-pool.${var.namespace}.svc"
- name: AUTHENTIK_POSTGRESQL__PASSWORD
valueFrom:
secretKeyRef:
name: "${var.instance}-${var.component}-pg-app"
key: password
envFrom:
- secretRef:
name: ${var.component}
- configMapRef:
name: ${var.component}
EOF
}
patches {
target {
kind = "ClusterRole"
name = "authentik-vynil-auth"
}
patch = <<-EOF
- op: replace
path: /metadata/name
value: authentik-${var.namespace}
EOF
}
patches {
target {
kind = "ClusterRoleBinding"
name = "authentik-vynil-auth"
}
patch = <<-EOF
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: authentik-vynil-auth
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: authentik-${var.namespace}
subjects:
- kind: ServiceAccount
name: authentik
namespace: ${var.namespace}
EOF
}
patches {
target {
kind = "ClusterRoleBinding"
name = "authentik-vynil-auth"
}
patch = <<-EOF
- op: replace
path: /metadata/name
value: authentik-${var.namespace}
EOF
}
}

View File

@@ -209,7 +209,7 @@ options:
pull_policy: IfNotPresent
registry: ghcr.io
repository: goauthentik/server
tag: 2023.10.7
tag: 2024.4.2
kubectl:
pull_policy: IfNotPresent
registry: docker.io
@@ -235,7 +235,7 @@ options:
pull_policy: IfNotPresent
registry: ghcr.io
repository: goauthentik/server
tag: 2023.10.7
tag: 2024.4.2
kubectl:
pull_policy: IfNotPresent
registry: docker.io
@@ -262,7 +262,7 @@ options:
pull_policy: IfNotPresent
registry: ghcr.io
repository: goauthentik/server
tag: 2023.10.7
tag: 2024.4.2
properties:
project:
default: goauthentik
@@ -281,7 +281,7 @@ options:
default: goauthentik/server
type: string
tag:
default: 2023.10.7
default: 2024.4.2
type: string
type: object
kubectl:

View File

@@ -1,20 +0,0 @@
# Source: authentik/templates/prom-service-monitor.yaml
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
name: authentik
labels:
helm.sh/chart: authentik-2023.10.7
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.10.7"
app.kubernetes.io/managed-by: Helm
spec:
endpoints:
- port: http-metrics
scrapeTimeout: 3s
interval: 30s
selector:
matchLabels:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik

View File

@@ -1,8 +1,8 @@
locals {
pg-labels = merge(local.common_labels, {
pg_labels = merge(local.common_labels, {
"app.kubernetes.io/component" = "pg"
})
pool-labels = merge(local.common_labels, {
pool_labels = merge(local.common_labels, {
"app.kubernetes.io/component" = "pg-pool"
})
}
@@ -14,7 +14,7 @@ resource "kubectl_manifest" "prj_pg" {
metadata:
name: "${var.instance}-${var.component}-pg"
namespace: "${var.namespace}"
labels: ${jsonencode(local.pg-labels)}
labels: ${jsonencode(local.pg_labels)}
spec:
instances: ${var.postgres.replicas}
imageName: "${var.images.postgresql.registry}/${var.images.postgresql.repository}:${var.images.postgresql.tag}"
@@ -56,7 +56,7 @@ resource "kubectl_manifest" "prj_pg_backup" {
metadata:
name: "${var.instance}-${var.component}-pg"
namespace: "${var.namespace}"
labels: ${jsonencode(local.pg-labels)}
labels: ${jsonencode(local.pg_labels)}
spec:
schedule: "${var.backups.schedule.db}"
backupOwnerReference: self
@@ -73,7 +73,7 @@ resource "kubectl_manifest" "prj_pg_pool" {
metadata:
name: "${var.instance}-${var.component}-pool"
namespace: "${var.namespace}"
labels: ${jsonencode(local.pool-labels)}
labels: ${jsonencode(local.pool_labels)}
spec:
cluster:
name: "${var.instance}-${var.component}-pg"

View File

@@ -1,19 +0,0 @@
# Source: authentik/charts/serviceAccount/templates/cluster-role-binding.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: authentik-vynil-auth
labels:
helm.sh/chart: serviceAccount-1.2.2
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.6.0"
app.kubernetes.io/managed-by: Helm
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: authentik-vynil-auth
subjects:
- kind: ServiceAccount
name: authentik
namespace: vynil-auth

View File

@@ -1,18 +0,0 @@
# Source: authentik/charts/serviceAccount/templates/cluster-role.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: authentik-vynil-auth
labels:
helm.sh/chart: serviceAccount-1.2.2
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.6.0"
app.kubernetes.io/managed-by: Helm
rules:
- apiGroups:
- apiextensions.k8s.io
resources:
- customresourcedefinitions
verbs:
- list

View File

@@ -1,20 +0,0 @@
# Source: authentik/charts/serviceAccount/templates/role-binding.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: authentik
namespace: vynil-auth
labels:
helm.sh/chart: serviceAccount-1.2.2
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.6.0"
app.kubernetes.io/managed-by: Helm
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: authentik
subjects:
- kind: ServiceAccount
name: authentik
namespace: vynil-auth

View File

@@ -1,74 +0,0 @@
# Source: authentik/charts/serviceAccount/templates/role.yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
name: authentik
namespace: vynil-auth
labels:
helm.sh/chart: serviceAccount-1.2.2
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.6.0"
app.kubernetes.io/managed-by: Helm
rules:
- apiGroups:
- ""
resources:
- secrets
- services
- configmaps
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- extensions
- apps
resources:
- deployments
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- extensions
- networking.k8s.io
resources:
- ingresses
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- traefik.containo.us
- traefik.io
resources:
- middlewares
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- monitoring.coreos.com
resources:
- servicemonitors
verbs:
- get
- create
- delete
- list
- patch
- apiGroups:
- apiextensions.k8s.io
resources:
- customresourcedefinitions
verbs:
- list

View File

@@ -5,7 +5,7 @@ resource "kubectl_manifest" "authentik_redis" {
metadata:
name: "${var.name}-${var.component}-redis"
namespace: "${var.namespace}"
labels: ${jsonencode(local.common_labels)}
labels: ${jsonencode(local.redis_all_labels)}
spec:
kubernetesConfig:
image: "${var.images.redis.registry}/${var.images.redis.repository}:${var.images.redis.tag}"

View File

@@ -0,0 +1,7 @@
// Post-template hook: probes the cluster for the Prometheus Operator CRDs
// and writes the results to `conditions.tf` under the "conditions" key —
// presumably surfaced to the terraform code as var.conditions (the
// PrometheusRule/ServiceMonitor resources gate on these flags); confirm.
const DEST=dest; // capture the destination dir provided by the runner
fn post_template() {
save_to_tf(`${global::DEST}/conditions.tf`, "conditions", #{
have_servicemonitors: have_crd("servicemonitors.monitoring.coreos.com"),
have_prometheusrules: have_crd("prometheusrules.monitoring.coreos.com")
});
}

View File

@@ -1,13 +0,0 @@
---
# Source: authentik/charts/serviceAccount/templates/service-account.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: authentik
namespace: vynil-auth
labels:
helm.sh/chart: serviceAccount-1.2.2
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.6.0"
app.kubernetes.io/managed-by: Helm

View File

@@ -1,26 +0,0 @@
# Source: authentik/templates/service.yaml
apiVersion: v1
kind: Service
metadata:
name: authentik
labels:
helm.sh/chart: authentik-2023.10.7
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.10.7"
app.kubernetes.io/managed-by: Helm
spec:
type: ClusterIP
ports:
- port: 9300
name: http-metrics
protocol: TCP
targetPort: http-metrics
- port: 80
targetPort: http
protocol: TCP
name: http
selector:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/component: "server"