first commit

This commit is contained in:
2023-07-14 11:51:07 +02:00
commit 284dc650c4
101 changed files with 8629 additions and 0 deletions

View File

@@ -0,0 +1,22 @@
locals {
  # Standard Vynil ownership labels applied to every generated resource.
  common-labels = {
    "vynil.solidite.fr/owner-name"      = var.instance
    "vynil.solidite.fr/owner-namespace" = var.namespace
    "vynil.solidite.fr/owner-category"  = var.category
    "vynil.solidite.fr/owner-component" = var.component
    "app.kubernetes.io/managed-by"      = "vynil"
    "app.kubernetes.io/name"            = var.component
    "app.kubernetes.io/instance"        = var.instance
  }
}

# Bootstrap secret created by the authentik component; exposes the API token
# consumed by the provisioning resources of this package.
data "kubernetes_secret_v1" "authentik" {
  metadata {
    name      = "authentik"
    namespace = var.namespace
  }
}

# This component ships no static manifests; the empty overlay is kept so the
# install pipeline always finds a kustomization to apply.
data "kustomization_overlay" "data" {
  resources = []
}

View File

@@ -0,0 +1,41 @@
---
# Vynil package descriptor for the authentik forward-auth outpost component.
apiVersion: vinyl.solidite.fr/v1beta1  # NOTE(review): "vinyl" here vs "vynil" in labels — confirm which spelling the CRD serves
kind: Component
category: share
metadata:
  name: authentik-forward
  description: null
options:
  domain-name:
    default: your_company.com
    examples:
    - your_company.com
    type: string
  sub-domain:
    default: null
  domain:
    default: your-company
    examples:
    - your-company
    type: string
  issuer:
    default: letsencrypt-prod
    examples:
    - letsencrypt-prod
    type: string
  ingress-class:
    default: traefik
    examples:
    - traefik
    type: string
dependencies:
- dist: null
  category: share
  component: authentik
providers:
  kubernetes: true
  authentik: true
  kubectl: null
  postgresql: null
  restapi: null
  http: true

View File

@@ -0,0 +1,80 @@
locals {
  # Headers for direct calls against the authentik REST API.
  request_headers = {
    "Content-Type" = "application/json"
    Authorization  = "Bearer ${local.authentik-token}"
  }
  # Bootstrap API token seeded into the authentik secret at install time.
  authentik-token      = data.kubernetes_secret_v1.authentik.data["AUTHENTIK_BOOTSTRAP_TOKEN"]
  forward-outpost-json = jsondecode(data.http.get_forward_outpost.response_body).results

  # Providers to attach to the "forward" outpost: keep whatever the existing
  # outpost already references and make sure our proxy provider is included.
  forward-outpost-providers = (
    length(local.forward-outpost-json) > 0
      ? (
        contains(local.forward-outpost-json[0].providers, authentik_provider_proxy.provider_forward.id)
          ? local.forward-outpost-json[0].providers
          : concat(local.forward-outpost-json[0].providers, [authentik_provider_proxy.provider_forward.id])
      )
      : [authentik_provider_proxy.provider_forward.id]
  )
}

# Look up any pre-existing "forward" outpost so provider links survive re-runs.
data "http" "get_forward_outpost" {
  depends_on      = [authentik_provider_proxy.provider_forward]
  url             = "http://authentik.${var.namespace}.svc/api/v3/outposts/instances/?name__iexact=forward"
  method          = "GET"
  request_headers = local.request_headers
  lifecycle {
    postcondition {
      condition     = contains([200], self.status_code)
      error_message = "Status code invalid"
    }
  }
}

# In-cluster (local) service connection the outpost controller deploys through.
resource "authentik_service_connection_kubernetes" "local" {
  depends_on = [data.kubernetes_secret_v1.authentik]
  name       = "local-forward"
  local      = true
}

data "authentik_flow" "default-authorization-flow" {
  depends_on = [authentik_service_connection_kubernetes.local]
  slug       = "default-provider-authorization-implicit-consent"
}

resource "authentik_provider_proxy" "provider_forward" {
  name               = "authentik-forward-provider"
  # NOTE(review): both hosts point at the in-cluster service; confirm that
  # external_host is not meant to be the public, browser-facing URL.
  internal_host      = "http://authentik"
  external_host      = "http://authentik"
  authorization_flow = data.authentik_flow.default-authorization-flow.id
}

# Ingress deployed by the authentik component; its first rule provides the
# browser-facing hostname used below.
data "kubernetes_ingress_v1" "authentik" {
  metadata {
    name      = "authentik"
    namespace = var.namespace
  }
}

resource "authentik_outpost" "outpost-forward" {
  name               = "forward"
  type               = "proxy"
  service_connection = authentik_service_connection_kubernetes.local.id
  config = jsonencode({
    "log_level": "info",
    "authentik_host": "http://authentik",
    "docker_map_ports": true,
    "kubernetes_replicas": 1,
    "kubernetes_namespace": var.namespace,
    "authentik_host_browser": "https://${data.kubernetes_ingress_v1.authentik.spec[0].rule[0].host}",
    "object_naming_template": "ak-outpost-%(name)s",
    "authentik_host_insecure": false,
    "kubernetes_service_type": "ClusterIP",
    "kubernetes_image_pull_secrets": [],
    "kubernetes_disabled_components": [],
    "kubernetes_ingress_annotations": {},
    "kubernetes_ingress_secret_name": "authentik-outpost-tls"
  })
  protocol_providers = local.forward-outpost-providers
}

data "authentik_user" "akadmin" {
  depends_on = [authentik_outpost.outpost-forward]
  username   = "akadmin"
}

resource "authentik_group" "group" {
  name         = "vynil-forward-admins"
  users        = [data.authentik_user.akadmin.id]
  is_superuser = true
}

View File

@@ -0,0 +1,22 @@
locals {
  # Standard Vynil ownership labels applied to every generated resource.
  common-labels = {
    "vynil.solidite.fr/owner-name"      = var.instance
    "vynil.solidite.fr/owner-namespace" = var.namespace
    "vynil.solidite.fr/owner-category"  = var.category
    "vynil.solidite.fr/owner-component" = var.component
    "app.kubernetes.io/managed-by"      = "vynil"
    "app.kubernetes.io/name"            = var.component
    "app.kubernetes.io/instance"        = var.instance
  }
}

# Bootstrap secret created by the authentik component; exposes the API token
# consumed by the provisioning resources of this package.
data "kubernetes_secret_v1" "authentik" {
  metadata {
    name      = "authentik"
    namespace = var.namespace
  }
}

# This component ships no static manifests; the empty overlay is kept so the
# install pipeline always finds a kustomization to apply.
data "kustomization_overlay" "data" {
  resources = []
}

View File

@@ -0,0 +1,24 @@
---
# Vynil package descriptor for the authentik LDAP outpost component.
apiVersion: vinyl.solidite.fr/v1beta1  # NOTE(review): "vinyl" here vs "vynil" in labels — confirm which spelling the CRD serves
kind: Component
category: share
metadata:
  name: authentik-ldap
  description: null
options:
  domain:
    default: your-company
    examples:
    - your-company
    type: string
dependencies:
- dist: null
  category: share
  component: authentik
providers:
  kubernetes: true
  authentik: true
  kubectl: true
  postgresql: null
  restapi: null
  http: true

View File

@@ -0,0 +1,108 @@
locals {
  # Headers for direct calls against the authentik REST API.
  request_headers = {
    "Content-Type" = "application/json"
    Authorization  = "Bearer ${local.authentik-token}"
  }
  # Bootstrap API token seeded into the authentik secret at install time.
  authentik-token   = data.kubernetes_secret_v1.authentik.data["AUTHENTIK_BOOTSTRAP_TOKEN"]
  ldap-outpost-json = jsondecode(data.http.get_ldap_outpost.response_body).results

  # Providers to attach to the "ldap" outpost: keep whatever the existing
  # outpost already references and make sure our LDAP provider is included.
  # (renamed from the misspelled "ldap-outpost-prividers")
  ldap-outpost-providers = (
    length(local.ldap-outpost-json) > 0
      ? (
        contains(local.ldap-outpost-json[0].providers, authentik_provider_ldap.provider_ldap.id)
          ? local.ldap-outpost-json[0].providers
          : concat(local.ldap-outpost-json[0].providers, [authentik_provider_ldap.provider_ldap.id])
      )
      : [authentik_provider_ldap.provider_ldap.id]
  )
}

# TODO: find a way to wait until the authentik service is ready
data "http" "get_ldap_outpost" {
  depends_on      = [authentik_provider_ldap.provider_ldap]
  url             = "http://authentik.${var.namespace}.svc/api/v3/outposts/instances/?name__iexact=ldap"
  method          = "GET"
  request_headers = local.request_headers
  lifecycle {
    postcondition {
      condition     = contains([200], self.status_code)
      error_message = "Status code invalid"
    }
  }
}

# Password stage accepting the inbuilt, token and LDAP backends.
resource "authentik_stage_password" "ldap-password-stage" {
  depends_on = [data.kubernetes_secret_v1.authentik]
  name       = "ldap-authentication-password"
  backends = [
    "authentik.core.auth.InbuiltBackend",
    "authentik.core.auth.TokenBackend",
    "authentik.sources.ldap.auth.LDAPBackend"
  ]
}

resource "authentik_stage_identification" "ldap-identification-stage" {
  name           = "ldap-identification-stage"
  user_fields    = ["username","email"]
  password_stage = authentik_stage_password.ldap-password-stage.id
}

resource "authentik_stage_user_login" "ldap-authentication-login" {
  depends_on = [data.kubernetes_secret_v1.authentik]
  name       = "ldap-authentication-login"
}

# Dedicated authentication flow bound by the LDAP provider below.
resource "authentik_flow" "ldap-authentication-flow" {
  depends_on  = [data.kubernetes_secret_v1.authentik]
  name        = "ldap-authentication-flow"
  title       = "ldap authentication flow"
  slug        = "ldap-authentication-flow"
  designation = "authentication"
}

resource "authentik_flow_stage_binding" "ldap-authentication-flow-10" {
  target = authentik_flow.ldap-authentication-flow.uuid
  stage  = authentik_stage_identification.ldap-identification-stage.id
  order  = 10
}

resource "authentik_flow_stage_binding" "ldap-authentication-flow-30" {
  target = authentik_flow.ldap-authentication-flow.uuid
  stage  = authentik_stage_user_login.ldap-authentication-login.id
  order  = 30
}

data "authentik_user" "akadmin" {
  depends_on = [kustomization_resource.post,authentik_flow_stage_binding.ldap-authentication-flow-30]
  username   = "akadmin"
}

resource "authentik_group" "group" {
  name         = "vynil-ldap-admins"
  users        = [data.authentik_user.akadmin.id]
  is_superuser = true
}

# In-cluster (local) service connection the outpost controller deploys through.
resource "authentik_service_connection_kubernetes" "local" {
  depends_on = [data.kubernetes_secret_v1.authentik]
  name       = "local-ldap"
  local      = true
}

resource "authentik_provider_ldap" "provider_ldap" {
  name      = "authentik-ldap-provider"
  # NOTE(review): the base DN ends with the literal "dc=namespace" — confirm
  # this is intended and not meant to be a variable.
  base_dn   = "dc=${var.namespace},dc=namespace"
  bind_flow = authentik_flow.ldap-authentication-flow.uuid
}

resource "authentik_outpost" "outpost-ldap" {
  name               = "ldap"
  type               = "ldap"
  service_connection = authentik_service_connection_kubernetes.local.id
  config = jsonencode({
    "log_level": "info",
    "authentik_host": "http://authentik",
    "docker_map_ports": true,
    "kubernetes_replicas": 1,
    "kubernetes_namespace": var.namespace,
    "authentik_host_browser": "",
    "object_naming_template": "ak-outpost-%(name)s",
    "authentik_host_insecure": false,
    "kubernetes_service_type": "ClusterIP",
    "kubernetes_image_pull_secrets": [],
    "kubernetes_disabled_components": [],
    "kubernetes_ingress_annotations": {},
    "kubernetes_ingress_secret_name": "authentik-outpost-tls"
  })
  protocol_providers = local.ldap-outpost-providers
}

View File

@@ -0,0 +1,71 @@
# Source: authentik/templates/server-deployment.yaml
# Helm-rendered authentik server (web/API) deployment, chart authentik-2023.6.3.
# NOTE(review): leading indentation appears stripped in this capture; the
# original chart output is properly nested YAML — restore before applying.
apiVersion: apps/v1
kind: Deployment
metadata:
name: authentik-server
labels:
helm.sh/chart: authentik-2023.6.3
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.6.1"
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/component: "server"
spec:
selector:
matchLabels:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/component: "server"
template:
metadata:
labels:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/component: "server"
app.kubernetes.io/version: "2023.6.1"
annotations:
# Checksum changes with the rendered config, forcing a pod rollout on update.
goauthentik.io/config-checksum: 39339b4fd4c8511ca989fe40932e07b38befc9e3642eab092900cdde5cdf8f37
spec:
enableServiceLinks: true
securityContext:
{}
containers:
- name: authentik
image: "ghcr.io/goauthentik/server:2023.6.1"
imagePullPolicy: "IfNotPresent"
args: ["server"]
# env is rendered empty by the chart; configuration comes from envFrom below
# (and is patched further by share/authentik/datas.tf at install time).
env:
envFrom:
- secretRef:
name: authentik
# volumeMounts rendered empty by the chart (no extra volumes configured).
volumeMounts:
ports:
- name: http
containerPort: 9000
protocol: TCP
- name: http-metrics
containerPort: 9300
protocol: TCP
- name: https
containerPort: 9443
protocol: TCP
livenessProbe:
httpGet:
path: /-/health/live/
port: http
initialDelaySeconds: 5
periodSeconds: 10
startupProbe:
failureThreshold: 60
httpGet:
path: /-/health/live/
port: http
periodSeconds: 5
readinessProbe:
httpGet:
path: /-/health/ready/
port: http
periodSeconds: 10
securityContext:
{}
volumes:

View File

@@ -0,0 +1,45 @@
# Source: authentik/templates/worker-deployment.yaml
# Helm-rendered authentik background-worker deployment, chart authentik-2023.6.3.
# NOTE(review): leading indentation appears stripped in this capture; the
# original chart output is properly nested YAML — restore before applying.
apiVersion: apps/v1
kind: Deployment
metadata:
name: authentik-worker
labels:
helm.sh/chart: authentik-2023.6.3
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.6.1"
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/component: "worker"
spec:
selector:
matchLabels:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/component: "worker"
template:
metadata:
labels:
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/component: "worker"
app.kubernetes.io/version: "2023.6.1"
annotations:
# Checksum changes with the rendered config, forcing a pod rollout on update.
goauthentik.io/config-checksum: 39339b4fd4c8511ca989fe40932e07b38befc9e3642eab092900cdde5cdf8f37
spec:
# Unlike the server, the worker uses the authentik ServiceAccount (it manages
# outpost deployments through the Kubernetes API).
serviceAccountName: authentik
enableServiceLinks: true
securityContext:
{}
containers:
- name: authentik
image: "ghcr.io/goauthentik/server:2023.6.1"
imagePullPolicy: "IfNotPresent"
args: ["worker"]
# env is rendered empty by the chart; configuration comes from envFrom below
# (and is patched further by share/authentik/datas.tf at install time).
env:
envFrom:
- secretRef:
name: authentik
volumeMounts:
securityContext:
{}
volumes:

View File

@@ -0,0 +1,26 @@
# Source: authentik/templates/server-hpa.yaml
# Autoscale the authentik server between 1 and 5 replicas at 50% average CPU.
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
  name: authentik-server
  labels:
    helm.sh/chart: authentik-2023.6.3
    app.kubernetes.io/name: authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/version: "2023.6.1"
    app.kubernetes.io/managed-by: Helm
    app.kubernetes.io/component: "server"
spec:
  minReplicas: 1
  maxReplicas: 5
  metrics:
  - resource:
      name: cpu
      target:
        averageUtilization: 50
        type: Utilization
    type: Resource
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: authentik-server

View File

@@ -0,0 +1,26 @@
# Source: authentik/templates/worker-hpa.yaml
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
name: authentik-worker
labels:
helm.sh/chart: authentik-2023.6.3
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.6.1"
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/component: "worker"
spec:
minReplicas: 1
maxReplicas: 5
metrics:
- resource:
name: cpu
target:
averageUtilization: 80
type: Utilization
type: Resource
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: authentik-worker

153
share/authentik/datas.tf Normal file
View File

@@ -0,0 +1,153 @@
locals {
  # Standard Vynil ownership labels applied to every generated resource.
  common-labels = {
    "vynil.solidite.fr/owner-name"      = var.instance
    "vynil.solidite.fr/owner-namespace" = var.namespace
    "vynil.solidite.fr/owner-category"  = var.category
    "vynil.solidite.fr/owner-component" = var.component
    "app.kubernetes.io/managed-by"      = "vynil"
    "app.kubernetes.io/name"            = var.component
    "app.kubernetes.io/instance"        = var.instance
  }
}

# Bootstrap secret created alongside authentik; exposes the API token.
data "kubernetes_secret_v1" "authentik" {
  metadata {
    name      = "authentik"
    namespace = var.namespace
  }
}

data "kustomization_overlay" "data" {
  namespace     = var.namespace
  common_labels = local.common-labels
  # Every YAML manifest shipped with the component except the package descriptor.
  resources = [for file in fileset(path.module, "*.yaml"): file if file != "index.yaml"]

  images {
    name     = "ghcr.io/goauthentik/server"
    new_name = "${var.image.registry}/${var.image.repository}"
    new_tag  = "${var.image.tag}"
  }

  # Non-secret authentik configuration, consumed via envFrom in both deployments.
  config_map_generator {
    name     = var.component
    behavior = "create"
    literals = [
      "AUTHENTIK_EMAIL__PORT=${var.email.port}",
      "AUTHENTIK_EMAIL__TIMEOUT=${var.email.timeout}",
      "AUTHENTIK_EMAIL__USE_TLS=${var.email.use_tls}",
      "AUTHENTIK_EMAIL__USE_SSL=${var.email.use_ssl}",
      "AUTHENTIK_ERROR_REPORTING__ENABLED=${var.error_reporting.enabled}",
      "AUTHENTIK_ERROR_REPORTING__ENVIRONMENT=${var.error_reporting.environment}",
      "AUTHENTIK_ERROR_REPORTING__SEND_PII=${var.error_reporting.send_pii}",
      "AUTHENTIK_GEOIP=${var.geoip}",
      "AUTHENTIK_LOG_LEVEL=${var.loglevel}",
      "AUTHENTIK_OUTPOSTS__CONTAINER_IMAGE_BASE=${var.image.registry}/${var.image.project}/%(type)s:%(version)s",
      "AUTHENTIK_POSTGRESQL__HOST=${var.instance}-${var.component}.${var.namespace}.svc",
      "AUTHENTIK_POSTGRESQL__NAME=${var.component}",
      "AUTHENTIK_POSTGRESQL__PORT=5432",
      "AUTHENTIK_POSTGRESQL__USER=${var.component}",
      "AUTHENTIK_REDIS__HOST=${var.name}-${var.component}-redis",
      "AUTHENTIK_BOOTSTRAP_EMAIL=${var.admin.email}@${var.domain-name}",
    ]
  }

  # Inject the configured image and database credentials into the server.
  patches {
    target {
      kind = "Deployment"
      name = "authentik-server"
    }
    patch = <<-EOF
      apiVersion: apps/v1
      kind: Deployment
      metadata:
        name: authentik-server
      spec:
        template:
          spec:
            containers:
            - name: authentik
              image: "${var.image.registry}/${var.image.repository}:${var.image.tag}"
              imagePullPolicy: "${var.image.pullPolicy}"
              env:
              - name: AUTHENTIK_POSTGRESQL__PASSWORD
                valueFrom:
                  secretKeyRef:
                    name: ${var.component}.${var.instance}-${var.component}.credentials.postgresql.acid.zalan.do
                    key: password
              envFrom:
              - secretRef:
                  name: ${var.component}
              - configMapRef:
                  name: ${var.component}
    EOF
  }

  # Same injection for the worker. The postgres cluster is named
  # "${var.instance}-${var.component}" (see postgres.tf), so the Zalando
  # operator's credentials secret uses var.instance here too — this previously
  # used var.name, which broke the worker whenever name != instance.
  patches {
    target {
      kind = "Deployment"
      name = "authentik-worker"
    }
    patch = <<-EOF
      apiVersion: apps/v1
      kind: Deployment
      metadata:
        name: authentik-worker
      spec:
        template:
          spec:
            containers:
            - name: authentik
              image: "${var.image.registry}/${var.image.repository}:${var.image.tag}"
              imagePullPolicy: "${var.image.pullPolicy}"
              env:
              - name: AUTHENTIK_POSTGRESQL__PASSWORD
                valueFrom:
                  secretKeyRef:
                    name: ${var.component}.${var.instance}-${var.component}.credentials.postgresql.acid.zalan.do
                    key: password
              envFrom:
              - secretRef:
                  name: ${var.component}
              - configMapRef:
                  name: ${var.component}
    EOF
  }

  # Rename the cluster-scoped RBAC objects per-namespace so several authentik
  # installs can coexist in one cluster.
  patches {
    target {
      kind = "ClusterRole"
      name = "authentik-vynil-auth"
    }
    patch = <<-EOF
      - op: replace
        path: /metadata/name
        value: authentik-${var.namespace}
    EOF
  }

  # First rewrite the binding's roleRef/subjects (strategic merge)...
  patches {
    target {
      kind = "ClusterRoleBinding"
      name = "authentik-vynil-auth"
    }
    patch = <<-EOF
      apiVersion: rbac.authorization.k8s.io/v1
      kind: ClusterRoleBinding
      metadata:
        name: authentik-vynil-auth
      roleRef:
        apiGroup: rbac.authorization.k8s.io
        kind: ClusterRole
        name: authentik-${var.namespace}
      subjects:
      - kind: ServiceAccount
        name: authentik
        namespace: ${var.namespace}
    EOF
  }

  # ...then rename the binding itself (JSON patch).
  patches {
    target {
      kind = "ClusterRoleBinding"
      name = "authentik-vynil-auth"
    }
    patch = <<-EOF
      - op: replace
        path: /metadata/name
        value: authentik-${var.namespace}
    EOF
  }
}

207
share/authentik/index.yaml Normal file
View File

@@ -0,0 +1,207 @@
---
# Vynil package descriptor for the authentik component: option schema with
# defaults/examples, package dependencies and required terraform providers.
# NOTE(review): leading indentation appears stripped in this capture; nesting
# follows the option-name / default / examples / properties / type schema.
apiVersion: vinyl.solidite.fr/v1beta1
kind: Component
category: share
metadata:
name: authentik
description: authentik is an open-source Identity Provider focused on flexibility and versatility
options:
email:
default:
port: 587
timeout: 30
use_ssl: false
use_tls: false
examples:
- port: 587
timeout: 30
use_ssl: false
use_tls: false
properties:
port:
default: 587
type: integer
timeout:
default: 30
type: integer
use_ssl:
default: false
type: boolean
use_tls:
default: false
type: boolean
type: object
geoip:
default: /geoip/GeoLite2-City.mmdb
examples:
- /geoip/GeoLite2-City.mmdb
type: string
admin:
default:
email: auth-admin
examples:
- email: auth-admin
properties:
email:
default: auth-admin
type: string
type: object
image:
default:
project: goauthentik
pullPolicy: IfNotPresent
registry: ghcr.io
repository: goauthentik/server
tag: 2023.5.4
examples:
- project: goauthentik
pullPolicy: IfNotPresent
registry: ghcr.io
repository: goauthentik/server
tag: 2023.5.4
properties:
project:
default: goauthentik
type: string
pullPolicy:
default: IfNotPresent
type: string
registry:
default: ghcr.io
type: string
repository:
default: goauthentik/server
type: string
tag:
default: 2023.5.4
type: string
type: object
error_reporting:
default:
enabled: false
environment: k8s
send_pii: false
examples:
- enabled: false
environment: k8s
send_pii: false
properties:
enabled:
default: false
type: boolean
environment:
default: k8s
type: string
send_pii:
default: false
type: boolean
type: object
sub-domain:
default: auth
examples:
- auth
type: string
redis:
default:
exporter:
enabled: true
image: quay.io/opstree/redis-exporter:v1.44.0
image: quay.io/opstree/redis:v7.0.5
storage: 8Gi
examples:
- exporter:
enabled: true
image: quay.io/opstree/redis-exporter:v1.44.0
image: quay.io/opstree/redis:v7.0.5
storage: 8Gi
properties:
exporter:
default:
enabled: true
image: quay.io/opstree/redis-exporter:v1.44.0
properties:
enabled:
default: true
type: boolean
image:
default: quay.io/opstree/redis-exporter:v1.44.0
type: string
type: object
image:
default: quay.io/opstree/redis:v7.0.5
type: string
storage:
default: 8Gi
type: string
type: object
postgres:
default:
replicas: 1
storage: 8Gi
version: '14'
examples:
- replicas: 1
storage: 8Gi
version: '14'
properties:
replicas:
default: 1
type: integer
storage:
default: 8Gi
type: string
version:
default: '14'
type: string
type: object
domain-name:
default: your_company.com
examples:
- your_company.com
type: string
ingress-class:
default: traefik
examples:
- traefik
type: string
issuer:
default: letsencrypt-prod
examples:
- letsencrypt-prod
type: string
domain:
default: your-company
examples:
- your-company
type: string
loglevel:
default: info
examples:
- info
type: string
# Other Vynil packages that must be installed before this one.
dependencies:
- dist: null
category: core
component: cert-manager
- dist: null
category: core
component: secret-generator
- dist: null
category: crd
component: prometheus
- dist: null
category: crd
component: traefik
- dist: null
category: dbo
component: postgresql
- dist: null
category: dbo
component: redis
# Terraform providers the installer must configure for this package.
providers:
kubernetes: null
authentik: true
kubectl: true
postgresql: null
restapi: null
http: null

View File

@@ -0,0 +1,75 @@
locals {
  # Public hostname(s) served by the ingress.
  dns-names   = ["${var.sub-domain}.${var.domain-name}"]
  middlewares = ["${var.instance}-https"]
  service = {
    "name" = "${var.instance}"
    "port" = {
      "number" = 80
    }
  }
  # One ingress rule per DNS name, routing everything under / to the service.
  rules = [for v in local.dns-names : {
    "host" = "${v}"
    "http" = {
      "paths" = [{
        "backend" = {
          "service" = local.service
        }
        "path"     = "/"
        "pathType" = "Prefix"
      }]
    }
  }]
}

# TLS certificate for the public hostname(s), issued by the cluster issuer.
resource "kubectl_manifest" "prj_certificate" {
  yaml_body = <<-EOF
    apiVersion: "cert-manager.io/v1"
    kind: "Certificate"
    metadata:
      name: "${var.instance}"
      namespace: "${var.namespace}"
      labels: ${jsonencode(local.common-labels)}
    spec:
      secretName: "${var.instance}-cert"
      dnsNames: ${jsonencode(local.dns-names)}
      issuerRef:
        name: "${var.issuer}"
        kind: "ClusterIssuer"
        group: "cert-manager.io"
  EOF
}

# HTTP -> HTTPS redirect middleware.
# NOTE(review): traefik.containo.us is the legacy API group; confirm the
# targeted traefik release still serves it (newer releases use traefik.io).
resource "kubectl_manifest" "prj_https_redirect" {
  yaml_body = <<-EOF
    apiVersion: "traefik.containo.us/v1alpha1"
    kind: "Middleware"
    metadata:
      name: "${var.instance}-https"
      namespace: "${var.namespace}"
      labels: ${jsonencode(local.common-labels)}
    spec:
      redirectScheme:
        scheme: "https"
        permanent: true
  EOF
}

resource "kubectl_manifest" "prj_ingress" {
  force_conflicts = true
  yaml_body = <<-EOF
    apiVersion: "networking.k8s.io/v1"
    kind: "Ingress"
    metadata:
      name: "${var.instance}"
      namespace: "${var.namespace}"
      labels: ${jsonencode(local.common-labels)}
      annotations:
        "traefik.ingress.kubernetes.io/router.middlewares": "${join(",", [for m in local.middlewares : format("%s-%s@kubernetescrd", var.namespace, m)])}"
    spec:
      ingressClassName: "${var.ingress-class}"
      rules: ${jsonencode(local.rules)}
      tls:
      - hosts: ${jsonencode(local.dns-names)}
        secretName: "${var.instance}-cert"
  EOF
}

View File

@@ -0,0 +1,162 @@
# Source: authentik/templates/prom-rules.yaml
# Helm-rendered Prometheus recording rules (Django request/latency/model/db
# aggregates) and alerting rules for authentik, chart authentik-2023.6.3.
# NOTE(review): leading indentation appears stripped in this capture; the
# original chart output is properly nested YAML — restore before applying.
apiVersion: monitoring.coreos.com/v1
kind: PrometheusRule
metadata:
name: authentik
labels:
helm.sh/chart: authentik-2023.6.3
app.kubernetes.io/name: authentik
app.kubernetes.io/instance: authentik
app.kubernetes.io/version: "2023.6.1"
app.kubernetes.io/managed-by: Helm
spec:
groups:
- name: authentik Aggregate request counters
rules:
- record: job:django_http_requests_before_middlewares_total:sum_rate30s
expr: sum(rate(django_http_requests_before_middlewares_total[30s])) by (job)
- record: job:django_http_requests_unknown_latency_total:sum_rate30s
expr: sum(rate(django_http_requests_unknown_latency_total[30s])) by (job)
- record: job:django_http_ajax_requests_total:sum_rate30s
expr: sum(rate(django_http_ajax_requests_total[30s])) by (job)
- record: job:django_http_responses_before_middlewares_total:sum_rate30s
expr: sum(rate(django_http_responses_before_middlewares_total[30s])) by (job)
- record: job:django_http_requests_unknown_latency_including_middlewares_total:sum_rate30s
expr: sum(rate(django_http_requests_unknown_latency_including_middlewares_total[30s])) by (job)
- record: job:django_http_requests_body_total_bytes:sum_rate30s
expr: sum(rate(django_http_requests_body_total_bytes[30s])) by (job)
- record: job:django_http_responses_streaming_total:sum_rate30s
expr: sum(rate(django_http_responses_streaming_total[30s])) by (job)
- record: job:django_http_responses_body_total_bytes:sum_rate30s
expr: sum(rate(django_http_responses_body_total_bytes[30s])) by (job)
- record: job:django_http_requests_total:sum_rate30s
expr: sum(rate(django_http_requests_total_by_method[30s])) by (job)
- record: job:django_http_requests_total_by_method:sum_rate30s
expr: sum(rate(django_http_requests_total_by_method[30s])) by (job,method)
- record: job:django_http_requests_total_by_transport:sum_rate30s
expr: sum(rate(django_http_requests_total_by_transport[30s])) by (job,transport)
- record: job:django_http_requests_total_by_view:sum_rate30s
expr: sum(rate(django_http_requests_total_by_view_transport_method[30s])) by (job,view)
- record: job:django_http_requests_total_by_view_transport_method:sum_rate30s
expr: sum(rate(django_http_requests_total_by_view_transport_method[30s])) by (job,view,transport,method)
- record: job:django_http_responses_total_by_templatename:sum_rate30s
expr: sum(rate(django_http_responses_total_by_templatename[30s])) by (job,templatename)
- record: job:django_http_responses_total_by_status:sum_rate30s
expr: sum(rate(django_http_responses_total_by_status[30s])) by (job,status)
- record: job:django_http_responses_total_by_status_name_method:sum_rate30s
expr: sum(rate(django_http_responses_total_by_status_name_method[30s])) by (job,status,name,method)
- record: job:django_http_responses_total_by_charset:sum_rate30s
expr: sum(rate(django_http_responses_total_by_charset[30s])) by (job,charset)
- record: job:django_http_exceptions_total_by_type:sum_rate30s
expr: sum(rate(django_http_exceptions_total_by_type[30s])) by (job,type)
- record: job:django_http_exceptions_total_by_view:sum_rate30s
expr: sum(rate(django_http_exceptions_total_by_view[30s])) by (job,view)
- name: authentik Aggregate latency histograms
rules:
- record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
expr: histogram_quantile(0.50, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
labels:
quantile: "50"
- record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
expr: histogram_quantile(0.95, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
labels:
quantile: "95"
- record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
expr: histogram_quantile(0.99, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
labels:
quantile: "99"
- record: job:django_http_requests_latency_including_middlewares_seconds:quantile_rate30s
expr: histogram_quantile(0.999, sum(rate(django_http_requests_latency_including_middlewares_seconds_bucket[30s])) by (job, le))
labels:
quantile: "99.9"
- record: job:django_http_requests_latency_seconds:quantile_rate30s
expr: histogram_quantile(0.50, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
labels:
quantile: "50"
- record: job:django_http_requests_latency_seconds:quantile_rate30s
expr: histogram_quantile(0.95, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
labels:
quantile: "95"
- record: job:django_http_requests_latency_seconds:quantile_rate30s
expr: histogram_quantile(0.99, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
labels:
quantile: "99"
- record: job:django_http_requests_latency_seconds:quantile_rate30s
expr: histogram_quantile(0.999, sum(rate(django_http_requests_latency_seconds_bucket[30s])) by (job, le))
labels:
quantile: "99.9"
- name: authentik Aggregate model operations
rules:
- record: job:django_model_inserts_total:sum_rate1m
expr: sum(rate(django_model_inserts_total[1m])) by (job, model)
- record: job:django_model_updates_total:sum_rate1m
expr: sum(rate(django_model_updates_total[1m])) by (job, model)
- record: job:django_model_deletes_total:sum_rate1m
expr: sum(rate(django_model_deletes_total[1m])) by (job, model)
- name: authentik Aggregate database operations
rules:
- record: job:django_db_new_connections_total:sum_rate30s
expr: sum(rate(django_db_new_connections_total[30s])) by (alias, vendor)
- record: job:django_db_new_connection_errors_total:sum_rate30s
expr: sum(rate(django_db_new_connection_errors_total[30s])) by (alias, vendor)
- record: job:django_db_execute_total:sum_rate30s
expr: sum(rate(django_db_execute_total[30s])) by (alias, vendor)
- record: job:django_db_execute_many_total:sum_rate30s
expr: sum(rate(django_db_execute_many_total[30s])) by (alias, vendor)
- record: job:django_db_errors_total:sum_rate30s
expr: sum(rate(django_db_errors_total[30s])) by (alias, vendor, type)
- name: authentik Aggregate migrations
rules:
- record: job:django_migrations_applied_total:max
expr: max(django_migrations_applied_total) by (job, connection)
- record: job:django_migrations_unapplied_total:max
expr: max(django_migrations_unapplied_total) by (job, connection)
- name: authentik Alerts
rules:
- alert: NoWorkersConnected
labels:
severity: critical
expr: max without (pid) (authentik_admin_workers) < 1
for: 10m
annotations:
summary: No workers connected
message: authentik instance {{ $labels.instance }}'s worker are either not running or not connected.
- alert: PendingMigrations
labels:
severity: critical
expr: max without (pid) (django_migrations_unapplied_total) > 0
for: 10m
annotations:
summary: Pending database migrations
message: authentik instance {{ $labels.instance }} has pending database migrations
- alert: FailedSystemTasks
labels:
severity: critical
expr: sum(increase(authentik_system_tasks{status="error"}[2h])) > 0
for: 2h
annotations:
summary: Failed system tasks
message: System task {{ $labels.task_name }} has failed
- alert: DisconnectedOutposts
labels:
severity: critical
expr: sum by (outpost) (max without (pid) (authentik_outposts_connected{uid!~"specific.*"})) < 1
for: 30m
annotations:
summary: Disconnected outpost
message: Outpost {{ $labels.outpost }} has at least 1 disconnected instance

View File

@@ -0,0 +1,20 @@
# Source: authentik/templates/prom-service-monitor.yaml
# Scrape the authentik service's http-metrics port every 30s.
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
  name: authentik
  labels:
    helm.sh/chart: authentik-2023.6.3
    app.kubernetes.io/name: authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/version: "2023.6.1"
    app.kubernetes.io/managed-by: Helm
spec:
  endpoints:
  - port: http-metrics
    scrapeTimeout: 3s
    interval: 30s
  selector:
    matchLabels:
      app.kubernetes.io/name: authentik
      app.kubernetes.io/instance: authentik

View File

@@ -0,0 +1,26 @@
# Zalando-operator PostgreSQL cluster backing authentik. The operator creates a
# credentials secret named "<user>.<cluster>.credentials.postgresql.acid.zalan.do".
resource "kubectl_manifest" "authentik_postgresql" {
  yaml_body = <<-EOF
    apiVersion: "acid.zalan.do/v1"
    kind: "postgresql"
    metadata:
      name: "${var.instance}-${var.component}"
      namespace: "${var.namespace}"
      labels: ${jsonencode(local.common-labels)}
    spec:
      databases:
        ${var.component}: "${var.component}"
      numberOfInstances: ${var.postgres.replicas}
      podAnnotations:
        "k8up.io/backupcommand": "pg_dump -U postgres -d ${var.component} --clean"
        "k8up.io/file-extension": ".sql"
      postgresql:
        version: "${var.postgres.version}"
      teamId: "${var.instance}"
      users:
        ${var.component}:
        - "superuser"
        - "createdb"
      volume:
        size: "${var.postgres.storage}"
  EOF
}

View File

@@ -0,0 +1,19 @@
# Source: authentik/charts/serviceAccount/templates/cluster-role-binding.yaml
# Grants the authentik ServiceAccount the CRD-listing ClusterRole.
# NOTE(review): the hard-coded vynil-auth name/namespace are rewritten
# per-install by the kustomize patches in datas.tf.
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: authentik-vynil-auth
  labels:
    helm.sh/chart: serviceAccount-1.2.2
    app.kubernetes.io/name: authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/version: "2023.6.0"
    app.kubernetes.io/managed-by: Helm
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: authentik-vynil-auth
subjects:
- kind: ServiceAccount
  name: authentik
  namespace: vynil-auth

View File

@@ -0,0 +1,18 @@
# Source: authentik/charts/serviceAccount/templates/cluster-role.yaml
# Cluster-wide permission to list CRDs (used by authentik's outpost controller).
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: authentik-vynil-auth
  labels:
    helm.sh/chart: serviceAccount-1.2.2
    app.kubernetes.io/name: authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/version: "2023.6.0"
    app.kubernetes.io/managed-by: Helm
rules:
- apiGroups:
  - apiextensions.k8s.io
  resources:
  - customresourcedefinitions
  verbs:
  - list

View File

@@ -0,0 +1,20 @@
# Source: authentik/charts/serviceAccount/templates/role-binding.yaml
# Binds the namespaced authentik Role to the authentik ServiceAccount.
# NOTE(review): the hard-coded vynil-auth namespace is replaced by the
# kustomize overlay namespace at install time.
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: authentik
  namespace: vynil-auth
  labels:
    helm.sh/chart: serviceAccount-1.2.2
    app.kubernetes.io/name: authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/version: "2023.6.0"
    app.kubernetes.io/managed-by: Helm
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: authentik
subjects:
- kind: ServiceAccount
  name: authentik
  namespace: vynil-auth

View File

@@ -0,0 +1,74 @@
# Source: authentik/charts/serviceAccount/templates/role.yaml
# Namespaced permissions the outpost controller needs to manage its workloads
# (secrets/services/configmaps, deployments, ingresses, traefik middlewares,
# servicemonitors) plus read access to CRDs.
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: authentik
  namespace: vynil-auth
  labels:
    helm.sh/chart: serviceAccount-1.2.2
    app.kubernetes.io/name: authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/version: "2023.6.0"
    app.kubernetes.io/managed-by: Helm
rules:
- apiGroups:
  - ""
  resources:
  - secrets
  - services
  - configmaps
  verbs:
  - get
  - create
  - delete
  - list
  - patch
- apiGroups:
  - extensions
  - apps
  resources:
  - deployments
  verbs:
  - get
  - create
  - delete
  - list
  - patch
- apiGroups:
  - extensions
  - networking.k8s.io
  resources:
  - ingresses
  verbs:
  - get
  - create
  - delete
  - list
  - patch
- apiGroups:
  - traefik.containo.us
  - traefik.io
  resources:
  - middlewares
  verbs:
  - get
  - create
  - delete
  - list
  - patch
- apiGroups:
  - monitoring.coreos.com
  resources:
  - servicemonitors
  verbs:
  - get
  - create
  - delete
  - list
  - patch
- apiGroups:
  - apiextensions.k8s.io
  resources:
  - customresourcedefinitions
  verbs:
  - list

30
share/authentik/redis.tf Normal file
View File

@@ -0,0 +1,30 @@
# Redis instance backing authentik, provisioned through the OT-ContainerKit
# redis-operator CRD (redis.redis.opstreelabs.in/v1beta1).
# The Redis password is read from the "${var.component}" secret's
# AUTHENTIK_REDIS__PASSWORD field, which the StringSecret resource in
# secret.tf generates.
resource "kubectl_manifest" "authentik_redis" {
  yaml_body = <<-EOF
  apiVersion: "redis.redis.opstreelabs.in/v1beta1"
  kind: "Redis"
  metadata:
    name: "${var.name}-${var.component}-redis"
    namespace: "${var.namespace}"
    labels: ${jsonencode(local.common-labels)}
  spec:
    kubernetesConfig:
      image: "${var.redis.image}"
      imagePullPolicy: "IfNotPresent"
      redisSecret:
        name: "${var.component}"
        key: "AUTHENTIK_REDIS__PASSWORD"
    storage:
      volumeClaimTemplate:
        spec:
          accessModes: ["ReadWriteOnce"]
          resources:
            requests:
              storage: "${var.redis.storage}"
    redisExporter:
      enabled: ${var.redis.exporter.enabled}
      image: "${var.redis.exporter.image}"
    # Run unprivileged; 1000 is the redis-operator image's default user.
    securityContext:
      runAsUser: 1000
      fsGroup: 1000
  EOF
}

23
share/authentik/secret.tf Normal file
View File

@@ -0,0 +1,23 @@
# Random credentials for authentik, generated in-cluster by the mittwald
# secret-generator operator (StringSecret CRD). forceRegenerate=false keeps
# the generated values stable across re-applies; operator-added annotations
# are excluded from the diff so they do not cause spurious updates.
resource "kubectl_manifest" "authentik_secret" {
  ignore_fields = ["metadata.annotations"]
  yaml_body = <<-EOF
  apiVersion: "secretgenerator.mittwald.de/v1alpha1"
  kind: "StringSecret"
  metadata:
    name: "${var.component}"
    namespace: "${var.namespace}"
    labels: ${jsonencode(local.common-labels)}
  spec:
    forceRegenerate: false
    fields:
      # NOTE(review): lengths are quoted strings — presumably the CRD schema
      # expects strings here; confirm against the StringSecret CRD.
      - fieldName: "AUTHENTIK_SECRET_KEY"
        length: "128"
      - fieldName: "AUTHENTIK_BOOTSTRAP_PASSWORD"
        length: "32"
      - fieldName: "AUTHENTIK_BOOTSTRAP_TOKEN"
        length: "64"
      - fieldName: "AUTHENTIK_REDIS__PASSWORD"
        length: "32"
  EOF
}

View File

@@ -0,0 +1,12 @@
# Source: authentik/charts/serviceAccount/templates/service-account.yaml
# ServiceAccount used by authentik's outpost controller; referenced by the
# Role/RoleBinding and ClusterRole defined alongside it.
apiVersion: v1
kind: ServiceAccount
metadata:
  name: authentik
  namespace: vynil-auth
  labels:
    helm.sh/chart: serviceAccount-1.2.2
    app.kubernetes.io/name: authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/version: "2023.6.0"
    app.kubernetes.io/managed-by: Helm

View File

@@ -0,0 +1,26 @@
# Source: authentik/templates/service.yaml
# ClusterIP Service in front of the authentik "server" pods: port 80 for the
# web UI/API and 9300 for Prometheus metrics.
# No namespace is set here — presumably it is injected at apply time by the
# kustomize overlay; verify against the component's datas.tf.
apiVersion: v1
kind: Service
metadata:
  name: authentik
  labels:
    helm.sh/chart: authentik-2023.6.3
    app.kubernetes.io/name: authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/version: "2023.6.1"
    app.kubernetes.io/managed-by: Helm
spec:
  type: ClusterIP
  ports:
    - port: 9300
      name: http-metrics
      protocol: TCP
      targetPort: http-metrics
    - port: 80
      targetPort: http
      protocol: TCP
      name: http
  selector:
    app.kubernetes.io/name: authentik
    app.kubernetes.io/instance: authentik
    app.kubernetes.io/component: "server"

View File

@@ -0,0 +1,89 @@
# Source: coredns/templates/deployment.yaml
# Single-replica CoreDNS Deployment. The Corefile is mounted from the
# "coredns-coredns" ConfigMap (the vynil overlay in datas.tf swaps this for
# the generated "${component}-${instance}" ConfigMap at install time).
apiVersion: apps/v1
kind: Deployment
metadata:
  name: coredns-coredns
  labels:
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/instance: "coredns"
    helm.sh/chart: "coredns-1.24.1"
    k8s-app: coredns
    kubernetes.io/cluster-service: "true"
    kubernetes.io/name: "CoreDNS"
    app.kubernetes.io/name: coredns
    app.kubernetes.io/version: "1.10.1"
spec:
  replicas: 1
  strategy:
    type: RollingUpdate
    rollingUpdate:
      maxUnavailable: 1
      maxSurge: 25%
  selector:
    matchLabels:
      app.kubernetes.io/instance: "coredns"
      k8s-app: coredns
      app.kubernetes.io/name: coredns
  template:
    metadata:
      labels:
        k8s-app: coredns
        app.kubernetes.io/name: coredns
        app.kubernetes.io/instance: "coredns"
      annotations:
        # Presumably a hash of the rendered config so pods roll when the
        # ConfigMap changes — confirm how the checksum is produced.
        checksum/config: 2c80ea26dcf7cd4d57c4ccbe0561210d06f8e048704a7edb5c495e4e2d60999d
        scheduler.alpha.kubernetes.io/tolerations: '[{"key":"CriticalAddonsOnly", "operator":"Exists"}]'
    spec:
      terminationGracePeriodSeconds: 30
      serviceAccountName: coredns-coredns
      # Default (node) DNS policy: CoreDNS must not resolve through itself.
      dnsPolicy: Default
      containers:
        - name: "coredns"
          image: "coredns/coredns:1.10.1"
          imagePullPolicy: IfNotPresent
          args: [ "-conf", "/etc/coredns/Corefile" ]
          volumeMounts:
            - name: config-volume
              mountPath: /etc/coredns
          resources:
            limits:
              cpu: 100m
              memory: 128Mi
            requests:
              cpu: 100m
              memory: 128Mi
          ports:
            - {"containerPort":53,"name":"udp-53","protocol":"UDP"}
            - {"containerPort":53,"name":"tcp-53","protocol":"TCP"}
          # Probe ports 8080/8181 match the "health"/"ready" plugins enabled
          # in the generated Corefile (see share/dns/config.tf).
          livenessProbe:
            httpGet:
              path: /health
              port: 8080
              scheme: HTTP
            initialDelaySeconds: 60
            periodSeconds: 10
            timeoutSeconds: 5
            successThreshold: 1
            failureThreshold: 5
          readinessProbe:
            httpGet:
              path: /ready
              port: 8181
              scheme: HTTP
            initialDelaySeconds: 30
            periodSeconds: 10
            timeoutSeconds: 5
            successThreshold: 1
            failureThreshold: 5
          securityContext:
            capabilities:
              add:
                # Required to bind the privileged DNS port 53.
                - NET_BIND_SERVICE
      volumes:
        - name: config-volume
          configMap:
            name: coredns-coredns
            items:
              - key: Corefile
                path: Corefile

52
share/dns/config.tf Normal file
View File

@@ -0,0 +1,52 @@
locals {
  # Opening of the generated Corefile: one catch-all server block on port 53
  # with error consolidation plus health (:8080) and ready (:8181) endpoints,
  # matching the probe ports of the coredns Deployment.
  begin-core = <<-EOF
  .:53 {
    errors {
      consolidate 5m ".* i/o timeout$" warning
      consolidate 30s "^Failed to .+"
    }
    health {
      lameduck 5s
    }
    ready
  EOF
  end-core = <<-EOF
  }
  EOF
  # SOA + NS records shared by every generated zone file.
  # NOTE(review): formatdate(..., timestamp()) yields a new serial on every
  # plan, so the ConfigMap is rewritten on each apply — confirm this churn
  # is intended.
  soa-ns = <<-EOF
  @ IN SOA ${var.sub-domain}.${var.domain-name}. ${var.domain-name}. (
      ${formatdate("YYYYMMDDhh",timestamp())} ; Serial
      4H ; Refresh
      1H ; Retry
      7D ; Expire
      4H ) ; Negative Cache TTL
  @ IN NS ${var.sub-domain}.${var.domain-name}.
  EOF
  # ConfigMap payload: the Corefile plus one zone file per entry in var.zones.
  # Fixes vs. the original:
  #  - join("\n", ...) so each "file" directive (and the closing brace) sits
  #    on its own line; join("") glued them together into an unparsable line.
  #  - the zone files are mounted as /etc/coredns/<zone> (items path=z.name
  #    in datas.tf), so the ".db" suffix is dropped from the file directive.
  files = merge({
    "Corefile" = join("\n", concat([local.begin-core],[for z in var.zones: format("file /etc/coredns/%s %s", z.name,z.name)],[local.end-core]))
  },[for z in var.zones: {
    "${z.name}" = join("\n", concat([
      "$TTL 60",
      "$ORIGIN ${z.name}.",
      local.soa-ns
    ],
    [for k,v in z.hosts: format("%s IN A %s", k, v)],
    [for k,v in z.hosts6: format("%s IN AAAA %s", k, v)],
    [for k,v in z.alias: format("%s IN CNAME %s", k, v)],
    z.wildcard!=""?[format("*.%s. IN A %s", z.name, z.wildcard)]:[],
    # fixed typo: z.namz -> z.name (broke apply whenever wildcard6 was set)
    z.wildcard6!=""?[format("*.%s. IN AAAA %s", z.name, z.wildcard6)]:[],
    ))
  }]...)
}
# ConfigMap carrying the generated Corefile and per-zone files (local.files);
# the kustomize patch in datas.tf mounts it into the coredns Deployment.
resource "kubectl_manifest" "coredns-config" {
  yaml_body = <<-EOF
  apiVersion: v1
  kind: ConfigMap
  metadata:
    name: "${var.component}-${var.instance}"
    namespace: "${var.namespace}"
    labels: ${jsonencode(local.common-labels)}
  data: ${jsonencode(local.files)}
  EOF
}

53
share/dns/datas.tf Normal file
View File

@@ -0,0 +1,53 @@
locals {
  # Standard vynil ownership labels applied to every managed resource.
  common-labels = {
    "vynil.solidite.fr/owner-name" = var.instance
    "vynil.solidite.fr/owner-namespace" = var.namespace
    "vynil.solidite.fr/owner-category" = var.category
    "vynil.solidite.fr/owner-component" = var.component
    "app.kubernetes.io/managed-by" = "vynil"
    "app.kubernetes.io/name" = var.component
    "app.kubernetes.io/instance" = var.instance
  }
  # ConfigMap projection list: the Corefile plus one file per zone, each
  # mounted under its zone name (no suffix) in /etc/coredns.
  items = concat([{
    "key" = "Corefile"
    "path" = "Corefile"
  }],[for z in var.zones: {
    "key" = z.name
    "path" = z.name
  }])
}
# Kustomize overlay over the chart-rendered YAML manifests in this module:
# pins the CoreDNS image to var.image and swaps the default ConfigMap for
# the generated "${var.component}-${var.instance}" one.
data "kustomization_overlay" "data" {
  namespace = var.namespace
  common_labels = local.common-labels
  # All bundled *.yaml manifests except the component descriptor.
  resources = [for file in fileset(path.module, "*.yaml"): file if file != "index.yaml"]
  images {
    name = "coredns/coredns"
    new_name = "${var.image.registry}/${var.image.repository}"
    new_tag = "${var.image.tag}"
  }
  patches {
    target {
      kind = "Deployment"
      name = "coredns-coredns"
    }
    patch = <<-EOF
    apiVersion: apps/v1
    kind: Deployment
    metadata:
      name: coredns-coredns
    spec:
      template:
        spec:
          containers:
            - name: coredns
              image: "${var.image.registry}/${var.image.repository}:${var.image.tag}"
              imagePullPolicy: "${var.image.pullPolicy}"
          volumes:
            - name: config-volume
              configMap:
                name: "${var.component}-${var.instance}"
                items: ${jsonencode(local.items)}
    EOF
  }
}

84
share/dns/index.yaml Normal file
View File

@@ -0,0 +1,84 @@
---
# Vynil component descriptor for the "dns" share component (CoreDNS-based
# DNS server). Declares user-facing options with defaults/schema and the
# terraform providers the component's code requires.
# NOTE(review): apiVersion spells "vinyl" while resource labels use "vynil";
# the spelling is consistent across components, so it is preserved here —
# confirm it is intentional.
apiVersion: vinyl.solidite.fr/v1beta1
kind: Component
category: share
metadata:
  name: dns
description: null
options:
  domain:
    default: your-company
    examples:
      - your-company
    type: string
  sub-domain:
    default: dns
    examples:
      - dns
    type: string
  # Zones served by CoreDNS; each entry becomes a zone file in the generated
  # ConfigMap (see config.tf).
  zones:
    default: []
    items:
      properties:
        alias:
          default: {}
          type: object
        hosts:
          default: {}
          type: object
        hosts6:
          default: {}
          type: object
        name:
          default: local.domain
          type: string
        wildcard:
          default: ''
          type: string
        wildcard6:
          default: ''
          type: string
      type: object
    type: array
  image:
    default:
      pullPolicy: IfNotPresent
      registry: docker.io
      repository: coredns/coredns
      tag: 1.10.1
    examples:
      - pullPolicy: IfNotPresent
        registry: docker.io
        repository: coredns/coredns
        tag: 1.10.1
    properties:
      pullPolicy:
        default: IfNotPresent
        enum:
          - Always
          - Never
          - IfNotPresent
        type: string
      registry:
        default: docker.io
        type: string
      repository:
        default: coredns/coredns
        type: string
      tag:
        default: 1.10.1
        type: string
    type: object
  domain-name:
    default: your_company.com
    examples:
      - your_company.com
    type: string
dependencies: []
providers:
  kubernetes: true
  authentik: null
  # fixed: was null, but this component's config.tf uses kubectl_manifest
  # resources, so the kubectl provider must be enabled.
  kubectl: true
  postgresql: null
  restapi: null
  http: null

View File

@@ -0,0 +1,21 @@
# Source: coredns/templates/clusterrolebinding.yaml
# Binds the coredns ClusterRole to the coredns-coredns ServiceAccount in
# the vynil-infra namespace.
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: coredns-coredns
  labels:
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/instance: "coredns"
    helm.sh/chart: "coredns-1.24.1"
    k8s-app: coredns
    kubernetes.io/cluster-service: "true"
    kubernetes.io/name: "CoreDNS"
    app.kubernetes.io/name: coredns
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: coredns-coredns
subjects:
  - kind: ServiceAccount
    name: coredns-coredns
    namespace: vynil-infra

View File

@@ -0,0 +1,31 @@
# Source: coredns/templates/clusterrole.yaml
# Read-only (list/watch) access to the core objects and EndpointSlices that
# CoreDNS watches for service discovery.
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: coredns-coredns
  labels:
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/instance: "coredns"
    helm.sh/chart: "coredns-1.24.1"
    k8s-app: coredns
    kubernetes.io/cluster-service: "true"
    kubernetes.io/name: "CoreDNS"
    app.kubernetes.io/name: coredns
rules:
  - apiGroups:
      - ""
    resources:
      - endpoints
      - services
      - pods
      - namespaces
    verbs:
      - list
      - watch
  - apiGroups:
      - discovery.k8s.io
    resources:
      - endpointslices
    verbs:
      - list
      - watch

View File

@@ -0,0 +1,13 @@
# Source: coredns/templates/serviceaccount.yaml
# ServiceAccount the CoreDNS pods run as; bound to the coredns ClusterRole.
apiVersion: v1
kind: ServiceAccount
metadata:
  name: coredns-coredns
  labels:
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/instance: "coredns"
    helm.sh/chart: "coredns-1.24.1"
    k8s-app: coredns
    kubernetes.io/cluster-service: "true"
    kubernetes.io/name: "CoreDNS"
    app.kubernetes.io/name: coredns

View File

@@ -0,0 +1,26 @@
# Source: coredns/templates/service-metrics.yaml
# Separate Service exposing CoreDNS's Prometheus metrics endpoint (:9153);
# the prometheus.io annotations enable annotation-based scraping.
apiVersion: v1
kind: Service
metadata:
  name: coredns-coredns-metrics
  labels:
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/instance: "coredns"
    helm.sh/chart: "coredns-1.24.1"
    k8s-app: coredns
    kubernetes.io/cluster-service: "true"
    kubernetes.io/name: "CoreDNS"
    app.kubernetes.io/name: coredns
    app.kubernetes.io/component: metrics
  annotations:
    prometheus.io/port: "9153"
    prometheus.io/scrape: "true"
spec:
  selector:
    app.kubernetes.io/instance: "coredns"
    k8s-app: coredns
    app.kubernetes.io/name: coredns
  ports:
    - name: metrics
      port: 9153
      targetPort: 9153

View File

@@ -0,0 +1,22 @@
# Source: coredns/templates/service.yaml
# DNS Service for CoreDNS: port 53 over both UDP and TCP, exposed outside
# the cluster as type LoadBalancer.
apiVersion: v1
kind: Service
metadata:
  name: coredns-coredns
  labels:
    app.kubernetes.io/managed-by: "Helm"
    app.kubernetes.io/instance: "coredns"
    helm.sh/chart: "coredns-1.24.1"
    k8s-app: coredns
    kubernetes.io/cluster-service: "true"
    kubernetes.io/name: "CoreDNS"
    app.kubernetes.io/name: coredns
spec:
  selector:
    app.kubernetes.io/instance: "coredns"
    k8s-app: coredns
    app.kubernetes.io/name: coredns
  ports:
    - {"name":"udp-53","port":53,"protocol":"UDP"}
    - {"name":"tcp-53","port":53,"protocol":"TCP"}
  type: LoadBalancer