fix

apps/dolibarr/backups.tf (new file)
@@ -0,0 +1,38 @@
resource "kubectl_manifest" "backup_schedule" {
  count     = var.backups.enable ? 1 : 0
  yaml_body = <<-EOF
    apiVersion: k8up.io/v1
    kind: Schedule
    metadata:
      name: "${var.instance}-backup"
      namespace: "${var.namespace}"
      labels: ${jsonencode(local.common-labels)}
    spec:
      backend:
        repoPasswordSecretRef:
          key: "${var.backups.restic-key}"
          name: "${var.backups.secret-name}"
        s3:
          accessKeyIDSecretRef:
            key: "${var.backups.key-id-key}"
            name: "${var.backups.secret-name}"
          bucket: "${var.instance}-${var.namespace}"
          endpoint: "${var.backups.endpoint}/restic"
          secretAccessKeySecretRef:
            key: "${var.backups.secret-key}"
            name: "${var.backups.secret-name}"
      backup:
        schedule: "${var.backups.schedule.backup}"
        failedJobsHistoryLimit: 2
        successfulJobsHistoryLimit: 2
      check:
        schedule: "${var.backups.schedule.check}"
      prune:
        retention:
          keepDaily: ${var.backups.retention.keepDaily}
          keepMonthly: ${var.backups.retention.keepMonthly}
          keepWeekly: ${var.backups.retention.keepWeekly}
          keepYearly: ${var.backups.retention.keepYearly}
        schedule: "${var.backups.schedule.prune}"
  EOF
}
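
This backups.tf (and the identical files for gitea and nextcloud below) reads a backups input variable whose shape is only implied by the attribute paths it uses (var.backups.enable, var.backups.schedule.*, var.backups.retention.*, and so on). A minimal sketch of a matching variable declaration, with names derived purely from that usage and defaults that are invented examples; the modules' actual variables.tf is not part of this commit:

variable "backups" {
  description = "Backup settings consumed by the k8up Schedule (sketch only)"
  type = object({
    enable      = bool
    endpoint    = string
    secret-name = string
    key-id-key  = string
    secret-key  = string
    restic-key  = string
    schedule = object({
      backup = string
      check  = string
      prune  = string
    })
    retention = object({
      keepDaily   = number
      keepWeekly  = number
      keepMonthly = number
      keepYearly  = number
    })
  })
  default = {
    enable      = false
    endpoint    = ""
    secret-name = "backup-settings"
    key-id-key  = "s3-id"
    secret-key  = "s3-secret"
    restic-key  = "restic-password" # assumed key name, not taken from this commit
    schedule = {
      backup = "30 3 * * *" # example cron expressions
      check  = "0 5 * * 0"
      prune  = "0 4 * * 0"
    }
    retention = {
      keepDaily   = 7
      keepWeekly  = 4
      keepMonthly = 6
      keepYearly  = 1
    }
  }
}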

apps/gitea/backups.tf (new file)
@@ -0,0 +1,38 @@
resource "kubectl_manifest" "backup_schedule" {
  count     = var.backups.enable ? 1 : 0
  yaml_body = <<-EOF
    apiVersion: k8up.io/v1
    kind: Schedule
    metadata:
      name: "${var.instance}-backup"
      namespace: "${var.namespace}"
      labels: ${jsonencode(local.common-labels)}
    spec:
      backend:
        repoPasswordSecretRef:
          key: "${var.backups.restic-key}"
          name: "${var.backups.secret-name}"
        s3:
          accessKeyIDSecretRef:
            key: "${var.backups.key-id-key}"
            name: "${var.backups.secret-name}"
          bucket: "${var.instance}-${var.namespace}"
          endpoint: "${var.backups.endpoint}/restic"
          secretAccessKeySecretRef:
            key: "${var.backups.secret-key}"
            name: "${var.backups.secret-name}"
      backup:
        schedule: "${var.backups.schedule.backup}"
        failedJobsHistoryLimit: 2
        successfulJobsHistoryLimit: 2
      check:
        schedule: "${var.backups.schedule.check}"
      prune:
        retention:
          keepDaily: ${var.backups.retention.keepDaily}
          keepMonthly: ${var.backups.retention.keepMonthly}
          keepWeekly: ${var.backups.retention.keepWeekly}
          keepYearly: ${var.backups.retention.keepYearly}
        schedule: "${var.backups.schedule.prune}"
  EOF
}

apps/nextcloud/backups.tf (new file)
@@ -0,0 +1,38 @@
resource "kubectl_manifest" "backup_schedule" {
  count     = var.backups.enable ? 1 : 0
  yaml_body = <<-EOF
    apiVersion: k8up.io/v1
    kind: Schedule
    metadata:
      name: "${var.instance}-backup"
      namespace: "${var.namespace}"
      labels: ${jsonencode(local.common-labels)}
    spec:
      backend:
        repoPasswordSecretRef:
          key: "${var.backups.restic-key}"
          name: "${var.backups.secret-name}"
        s3:
          accessKeyIDSecretRef:
            key: "${var.backups.key-id-key}"
            name: "${var.backups.secret-name}"
          bucket: "${var.instance}-${var.namespace}"
          endpoint: "${var.backups.endpoint}/restic"
          secretAccessKeySecretRef:
            key: "${var.backups.secret-key}"
            name: "${var.backups.secret-name}"
      backup:
        schedule: "${var.backups.schedule.backup}"
        failedJobsHistoryLimit: 2
        successfulJobsHistoryLimit: 2
      check:
        schedule: "${var.backups.schedule.check}"
      prune:
        retention:
          keepDaily: ${var.backups.retention.keepDaily}
          keepMonthly: ${var.backups.retention.keepMonthly}
          keepWeekly: ${var.backups.retention.keepWeekly}
          keepYearly: ${var.backups.retention.keepYearly}
        schedule: "${var.backups.schedule.prune}"
  EOF
}

@@ -6,6 +6,56 @@ metadata:
  name: wildduck
  description: null
options:
  domain-name:
    default: your_company.com
    examples:
    - your_company.com
    type: string
  issuer:
    default: letsencrypt-prod
    examples:
    - letsencrypt-prod
    type: string
  backups:
    default:
      enable: false
      endpoint: ''
      key-id-key: s3-id
      secret-key: s3-secret
      secret-name: backup-settings
    examples:
    - enable: false
      endpoint: ''
      key-id-key: s3-id
      secret-key: s3-secret
      secret-name: backup-settings
    properties:
      enable:
        default: false
        type: boolean
      endpoint:
        default: ''
        type: string
      key-id-key:
        default: s3-id
        type: string
      secret-key:
        default: s3-secret
        type: string
      secret-name:
        default: backup-settings
        type: string
    type: object
  domain:
    default: your-company
    examples:
    - your-company
    type: string
  sub-domain:
    default: mail
    examples:
    - mail
    type: string
  redis:
    default:
      exporter:
@@ -202,66 +252,16 @@ options:
        type: string
      type: object
    type: object
  backups:
    default:
      enable: false
      endpoint: ''
      key-id-key: s3-id
      secret-key: s3-secret
      secret-name: backup-settings
  ingress-class:
    default: traefik
    examples:
    - enable: false
      endpoint: ''
      key-id-key: s3-id
      secret-key: s3-secret
      secret-name: backup-settings
    properties:
      enable:
        default: false
        type: boolean
      endpoint:
        default: ''
        type: string
      key-id-key:
        default: s3-id
        type: string
      secret-key:
        default: s3-secret
        type: string
      secret-name:
        default: backup-settings
        type: string
    type: object
  sub-domain:
    default: mail
    examples:
    - mail
    - traefik
    type: string
  additional-domains:
    default: []
    items:
      type: string
    type: array
  domain-name:
    default: your_company.com
    examples:
    - your_company.com
    type: string
  issuer:
    default: letsencrypt-prod
    examples:
    - letsencrypt-prod
    type: string
  ingress-class:
    default: traefik
    examples:
    - traefik
    type: string
  domain:
    default: your-company
    examples:
    - your-company
    type: string
dependencies:
- dist: null
  category: dbo
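
The backups option added to the wildduck schema above only describes the S3 connection settings. Purely as an illustration of the value shape those properties define, written as an HCL object (enable and endpoint are invented example values; the rest are the schema defaults):

locals {
  # Illustrative only: the shape described by the backups option schema above,
  # with enable flipped on and an example S3 endpoint substituted in.
  backups_example = {
    enable      = true
    endpoint    = "https://s3.example.com"
    key-id-key  = "s3-id"
    secret-key  = "s3-secret"
    secret-name = "backup-settings"
  }
}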

@@ -99,7 +99,7 @@ resource "kubectl_manifest" "webmail_config" {
    logoutRedirect = "http:/127.0.0.1:3000/logout" # URL to redirect when user clicks on "log out"

    [api]
    url="http://wildduck.vynil-mail.svc.cluster.local:80"
    url="http://${var.instance}-wildduck-api.${var.namespace}.svc.cluster.local:80"
    accessToken="wildduck1234"

    [dbs]
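
The single changed line in this hunk replaces the hard-coded wildduck API address with one built from the module's instance and namespace inputs. A small sketch of what the template resolves to, assuming instance = "mail" (an invented example) and the vynil-mail namespace taken from the old value:

locals {
  # Example only: with instance = "mail" and namespace = "vynil-mail",
  # the templated [api] url renders as
  #   http://mail-wildduck-api.vynil-mail.svc.cluster.local:80
  wildduck_api_url = "http://${var.instance}-wildduck-api.${var.namespace}.svc.cluster.local:80"
}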