⬆️ update go-tasks
Commit to https://github.com/auricom/home-cluster.git (mirror)
@@ -12,7 +12,7 @@ spec:
       restartPolicy: OnFailure
       containers:
         - name: list
-          image: docker.io/restic/restic:0.14.0
+          image: docker.io/restic/restic:0.16.0
           args: ["snapshots"]
           envFrom:
             - secretRef:
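The list container gets its repository settings entirely from the referenced secret. For reference, a minimal sketch of what `${rsrc}-restic-secret` might contain for an S3-style restic repository (the secret name pattern comes from the templates; the name, namespace, and endpoint below are placeholders, and the exact keys depend on the storage backend):

---
apiVersion: v1
kind: Secret
metadata:
  name: plex-restic-secret   # ${rsrc}-restic-secret with rsrc=plex (illustrative)
  namespace: default
stringData:
  RESTIC_REPOSITORY: "s3:https://s3.example.com/restic/plex"   # placeholder endpoint
  RESTIC_PASSWORD: "<repository password>"
  AWS_ACCESS_KEY_ID: "<access key>"
  AWS_SECRET_ACCESS_KEY: "<secret key>"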
@@ -11,10 +11,21 @@ spec:
     repository: "${rsrc}-restic-secret"
     destinationPVC: "${claim}"
     copyMethod: Direct
+    storageClassName: rook-ceph-block
+    # IMPORTANT NOTE:
+    #   Set to the last X number of snapshots to restore from
+    previous: ${previous}
+    # OR;
     # IMPORTANT NOTE:
     #   On bootstrap set `restoreAsOf` to the time the old cluster was destroyed.
     #   This will essentially prevent volsync from trying to restore a backup
     #   from an application that started with default data in the PVC.
     #   Do not restore snapshots made after the following RFC3339 Timestamp.
     #   date --rfc-3339=seconds (--utc)
-    # restoreAsOf: "2022-12-27T01:00:00-05:00"
+    # restoreAsOf: "2022-12-10T16:00:00-05:00"
+    #
+    # MIGHT BE NEEDED
+    # moverSecurityContext:
+    #   runAsUser: 568
+    #   runAsGroup: 568
+    #   fsGroup: 568
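A note on the commented `date` hint above: on GNU date, `--rfc-3339=seconds` separates date and time with a space, while the `restoreAsOf` examples use the "T"-separated form. A small sketch of both (output reflects the current time):

# Space-separated RFC3339, as the comment suggests:
date --rfc-3339=seconds --utc
# "T"-separated variant matching the restoreAsOf examples:
date --utc +"%Y-%m-%dT%H:%M:%S%:z"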
@@ -8,21 +8,23 @@ x-task-vars: &task-vars
   claim: '{{.claim}}'
   ts: '{{.ts}}'
   kustomization: '{{.kustomization}}'
+  previous: '{{.previous}}'
 
 vars:
-  destinationTemplate: "{{.PROJECT_DIR}}/.taskfiles/VolSync/ReplicationDestination.tmpl.yaml"
-  wipeJobTemplate: "{{.PROJECT_DIR}}/.taskfiles/VolSync/WipeJob.tmpl.yaml"
-  waitForJobScript: "{{.PROJECT_DIR}}/.taskfiles/VolSync/wait-for-job.sh"
-  listJobTemplate: "{{.PROJECT_DIR}}/.taskfiles/VolSync/ListJob.tmpl.yaml"
+  destinationTemplate: "{{.ROOT_DIR}}/.taskfiles/VolSync/ReplicationDestination.tmpl.yaml"
+  wipeJobTemplate: "{{.ROOT_DIR}}/.taskfiles/VolSync/WipeJob.tmpl.yaml"
+  waitForJobScript: "{{.ROOT_DIR}}/.taskfiles/VolSync/wait-for-job.sh"
+  listJobTemplate: "{{.ROOT_DIR}}/.taskfiles/VolSync/ListJob.tmpl.yaml"
+  unlockJobTemplate: "{{.ROOT_DIR}}/.taskfiles/VolSync/UnlockJob.tmpl.yaml"
   ts: '{{now | date "150405"}}'
 
 tasks:
 
   list:
-    desc: List all snapshots taken by restic for a given ReplicationSource (ex. task vs:list rsrc=plex [namespace=default])
+    desc: List all snapshots taken by restic for a given ReplicationSource (ex. task volsync:list rsrc=plex [namespace=default])
     silent: true
     cmds:
-      - envsubst < {{.listJobTemplate}} | kubectl apply -f -
+      - envsubst < <(cat {{.listJobTemplate}}) | kubectl apply -f -
       - bash {{.waitForJobScript}} list-{{.rsrc}}-{{.ts}} {{.namespace}}
       - kubectl -n {{.namespace}} wait job/list-{{.rsrc}}-{{.ts}} --for condition=complete --timeout=1m
       - kubectl -n {{.namespace}} logs job/list-{{.rsrc}}-{{.ts}} --container list
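With the prefix change, the list task is now invoked under `volsync:` rather than `vs:`, for example (namespace value here is illustrative):

task volsync:list rsrc=plex
task volsync:list rsrc=plex namespace=media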
@@ -35,16 +37,34 @@ tasks:
       - sh: test -f {{.waitForJobScript}}
       - sh: test -f {{.listJobTemplate}}
 
+  unlock:
+    desc: Unlocks restic repository for a given ReplicationSource (ex. task volsync:unlock rsrc=plex [namespace=default])
+    silent: true
+    cmds:
+      - envsubst < <(cat {{.unlockJobTemplate}}) | kubectl apply -f -
+      - bash {{.waitForJobScript}} unlock-{{.rsrc}}-{{.ts}} {{.namespace}}
+      - kubectl -n {{.namespace}} wait job/unlock-{{.rsrc}}-{{.ts}} --for condition=complete --timeout=1m
+      - kubectl -n {{.namespace}} logs job/unlock-{{.rsrc}}-{{.ts}} --container unlock
+      - kubectl -n {{.namespace}} delete job unlock-{{.rsrc}}-{{.ts}}
+    vars:
+      rsrc: '{{ or .rsrc (fail "ReplicationSource `rsrc` is required") }}'
+      namespace: '{{.namespace | default "default"}}'
+    env: *task-vars
+    preconditions:
+      - sh: test -f {{.waitForJobScript}}
+      - sh: test -f {{.unlockJobTemplate}}
+
   # To run backup jobs in parallel for all replicationsources:
-  #   - kubectl get replicationsources --all-namespaces --no-headers | awk '{print $2, $1}' | xargs --max-procs=4 -l bash -c 'task vs:snapshot rsrc=$0 namespace=$1'
+  #   - kubectl get replicationsources --all-namespaces --no-headers | awk '{print $2, $1}' | xargs --max-procs=4 -l bash -c 'task volsync:snapshot rsrc=$0 namespace=$1'
   #
   snapshot:
-    desc: Trigger a Restic ReplicationSource snapshot (ex. task vs:snapshot rsrc=plex [namespace=default])
+    desc: Trigger a Restic ReplicationSource snapshot (ex. task volsync:snapshot rsrc=plex [namespace=default])
     cmds:
      - kubectl -n {{.namespace}} patch replicationsources {{.rsrc}} --type merge -p '{"spec":{"trigger":{"manual":"{{.ts}}"}}}'
      - bash {{.waitForJobScript}} volsync-src-{{.rsrc}} {{.namespace}}
      - kubectl -n {{.namespace}} wait job/volsync-src-{{.rsrc}} --for condition=complete --timeout=120m
-      # TODO: Error from server (NotFound): jobs.batch "volsync-src-zzztest" not found
+      # TODO: Find a way to output logs
+      #       Error from server (NotFound): jobs.batch "volsync-src-zzztest" not found
      # - kubectl -n {{.namespace}} logs job/volsync-src-{{.rsrc}}
    vars:
      rsrc: '{{ or .rsrc (fail "ReplicationSource `rsrc` is required") }}'
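For context on the snapshot task above: the kubectl patch sets a manual trigger on the ReplicationSource, which VolSync acts on by creating the volsync-src-<rsrc> backup Job; once the backup completes, VolSync is expected to acknowledge the trigger in the resource's status (lastManualSync). A sketch of the patched field (the value is whatever `{{.ts}}` expands to):

spec:
  trigger:
    manual: "150405"   # {{.ts}}, HHMMSS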
@@ -56,10 +76,10 @@ tasks:
         msg: "ReplicationSource '{{.rsrc}}' not found in namespace '{{.namespace}}'"
 
   # To run restore jobs in parallel for all replicationdestinations:
-  #   - kubectl get replicationsources --all-namespaces --no-headers | awk '{print $2, $1}' | xargs --max-procs=4 -l bash -c 'task vs:restore rsrc=$0 namespace=$1'
+  #   - kubectl get replicationsources --all-namespaces --no-headers | awk '{print $2, $1}' | xargs --max-procs=2 -l bash -c 'task volsync:restore rsrc=$0 namespace=$1'
   #
   restore:
-    desc: Trigger a Restic ReplicationSource restore (ex. task vs:restore rsrc=plex [namespace=default])
+    desc: Trigger a Restic ReplicationSource restore (ex. task volsync:restore rsrc=plex [namespace=default])
     cmds:
       - task: restore-suspend-app
         vars: *task-vars
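Usage is analogous to the snapshot task; illustrative invocations (names and namespace are placeholders, and `previous` defaults to 2 per the vars further down):

task volsync:restore rsrc=plex
task volsync:restore rsrc=home-assistant namespace=home previous=3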
@@ -86,10 +106,12 @@ tasks:
       controller:
         sh: |
           app=$(kubectl -n {{.namespace}} get persistentvolumeclaim {{.claim}} -o jsonpath="{.metadata.labels.app\.kubernetes\.io/name}")
-          if [[ $(kubectl -n {{.namespace}} get deployment ${app}) ]]; then
-            echo "deployments.apps/$app" && exit 0
+          if kubectl -n {{ .namespace }} get deployment.apps/$app >/dev/null 2>&1 ; then
+            echo "deployment.apps/$app"
+          else
+            echo "statefulset.apps/$app"
           fi
-          echo "statefulsets.apps/$app"
+      previous: "{{.previous | default 2}}"
     env: *task-vars
     preconditions:
       - sh: test -f {{.wipeJobTemplate}}
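The reworked controller lookup above reads as a small standalone script; this sketch substitutes literal values for the `{{.namespace}}`/`{{.claim}}` template variables (the values are illustrative):

#!/usr/bin/env bash
set -euo pipefail
namespace="default"    # stands in for {{.namespace}}
claim="plex-config"    # stands in for {{.claim}}
# Resolve the owning app from the PVC's app.kubernetes.io/name label
app=$(kubectl -n "$namespace" get persistentvolumeclaim "$claim" \
  -o jsonpath="{.metadata.labels.app\.kubernetes\.io/name}")
# Prefer a Deployment if one exists, otherwise fall back to a StatefulSet
if kubectl -n "$namespace" get deployment.apps/"$app" >/dev/null 2>&1; then
  echo "deployment.apps/$app"
else
  echo "statefulset.apps/$app"
fi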
.taskfiles/VolSync/UnlockJob.tmpl.yaml (new file, 19 lines)
@@ -0,0 +1,19 @@
+---
+apiVersion: batch/v1
+kind: Job
+metadata:
+  name: "unlock-${rsrc}-${ts}"
+  namespace: "${namespace}"
+spec:
+  ttlSecondsAfterFinished: 3600
+  template:
+    spec:
+      automountServiceAccountToken: false
+      restartPolicy: OnFailure
+      containers:
+        - name: unlock
+          image: docker.io/restic/restic:0.16.0
+          args: ["unlock", "--remove-all"]
+          envFrom:
+            - secretRef:
+                name: "${rsrc}-restic-secret"
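The new unlock Job simply runs restic against the same secret the backups use. Run locally with the repository variables from `${rsrc}-restic-secret` exported in the shell, the equivalent commands would be:

restic list locks            # inspect whether any stale locks exist
restic unlock --remove-all   # what the Job's args perform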
@@ -12,8 +12,8 @@ spec:
       restartPolicy: OnFailure
       containers:
         - name: wipe
-          image: ghcr.io/onedr0p/alpine:3.17.0@sha256:8e1eb13c3ca5c038f3bf22a5fe9e354867f97f98a78027c44b7c76fce81fa61d
-          command: ["/bin/bash", "-c", "cd /config; find . -delete"]
+          image: public.ecr.aws/docker/library/busybox:latest
+          command: ["/bin/sh", "-c", "cd /config; find . -delete"]
           volumeMounts:
             - name: config
               mountPath: /config
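The hunk only shows the container side of the wipe Job; the `config` mount presumably maps to the application's PVC. A sketch of the matching volumes stanza, assuming the same `${claim}` substitution used by the other templates (this part is not shown in the diff):

      volumes:
        - name: config
          persistentVolumeClaim:
            claimName: "${claim}"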