Replace backup script with autorestic

This commit is contained in:
Tobias Reisinger 2024-10-06 01:59:46 +02:00
parent 13084e3558
commit ed51a86935
Signed by: serguzim
GPG key ID: 13AD60C237A28DFE
17 changed files with 180 additions and 224 deletions

View file

@ -32,8 +32,9 @@ all_services:
dns:
- domain: serguzim.me
target: forgejo
volumes_backup:
- forgejo_data
backup:
- name: forgejo_data
type: docker
- name: forgejo_runner
host: node002
@ -46,32 +47,38 @@ all_services:
dns:
- domain: serguzim.me
target: inventory
volumes_backup:
- homebox_data
backup:
- name: homebox_data
type: docker
- name: immich
host: node002
dns:
- domain: serguzim.me
target: gallery
volumes_backup:
- immich_upload
backup:
- name: immich_upload
type: docker
- name: immich_database
type: hook
- name: influxdb
host: node002
dns:
- domain: serguzim.me
target: tick
volumes_backup:
- influxdb_data
backup:
- name: influxdb_data
type: docker
- name: jellyfin
host: node002
dns:
- domain: serguzim.me
target: media
volumes_backup:
- jellyfin_config
backup:
- name: jellyfin_config
type: docker
#- jellyfin_media # TODO
- name: linkwarden
@ -85,6 +92,9 @@ all_services:
dns:
- domain: serguzim.me
target: mail
backup:
- name: mailcow
type: hook
- name: minio
host: node002
@ -95,16 +105,24 @@ all_services:
target: console.s3
name: minio-console
alias: minio
volumes_backup:
- minio_data
backup:
- name: minio_data
type: docker
- name: ntfy
host: node002
dns:
- domain: serguzim.me
target: push
volumes_backup:
- ntfy_data
backup:
- name: ntfy_data
type: docker
- name: postgresql
host: node002
backup:
- name: postgresql
type: hook
- name: reitanlage_oranienburg
host: node002
@ -115,8 +133,9 @@ all_services:
target: www
name: reitanlage_oranienburg-www
alias: reitanlage_oranienburg
volumes_backup:
- reitanlage-oranienburg_data
backup:
- name: reitanlage-oranienburg_data
type: docker
- name: shlink
host: node002
@ -137,8 +156,9 @@ all_services:
target: matrix
name: synapse_msrg
alias: synapse
volumes_backup:
- synapse_media_store
backup:
- name: synapse_media_store
type: docker
ports:
- 8448:8448
@ -147,16 +167,18 @@ all_services:
dns:
- domain: serguzim.me
target: recipes
volumes_backup:
- tandoor_mediafiles
backup:
- name: tandoor_mediafiles
type: docker
- name: teamspeak_fallback
host: node002
dns:
- domain: serguzim.me
target: ts
volumes_backup:
- teamspeak-fallback-data
backup:
- name: teamspeak-fallback-data
type: docker
- name: telegraf
host: node002
@ -178,16 +200,18 @@ all_services:
dns:
- domain: serguzim.me
target: status
volumes_backup:
- uptime-kuma_data
backup:
- name: uptime-kuma_data
type: docker
- name: vikunja
host: node002
dns:
- domain: serguzim.me
target: todo
volumes_backup:
- vikunja_data
backup:
- name: vikunja_data
type: docker
- name: webhook
host: node002

View file

@ -0,0 +1,38 @@
import copy
class FilterModule(object):
    """Ansible filter plugin that builds autorestic location configs."""

    def filters(self):
        return {
            'map_backup_locations': self.map_backup_locations
        }

    def map_backup_locations(self, locations, backends, hooks):
        """Turn per-service backup definitions into autorestic locations.

        :param locations: list of dicts with "name", "type" ("docker",
            "hook" or "directory") and, for type "directory", a "path" key
        :param backends: mapping of backend name -> backend config; every
            location is backed up to all of them
        :param hooks: default hooks mapping shared by all locations
        :returns: dict keyed by the lower-cased location name, suitable for
            the ``locations:`` section of .autorestic.yml
        :raises ValueError: if a location has an unknown "type"
        """
        backend_names = list(backends.keys())
        result = {}
        for location in locations:
            name = location["name"]
            location_type = location["type"]
            new_location = {
                "to": backend_names,
                "forget": "yes",
                # deep copy: the per-location "before" hook appended below
                # must not leak into the shared default hooks mapping
                "hooks": copy.deepcopy(hooks),
            }
            if location_type == "docker":
                # autorestic backs up the named docker volume directly
                new_location["from"] = name
                new_location["type"] = "volume"
            elif location_type == "hook":
                # a "before" hook dumps the data into a staging directory,
                # which is then backed up as a plain directory
                backup_dir = f"/opt/services/_backup/{name}"
                new_location["from"] = backup_dir
                new_location["hooks"].setdefault("before", []).append(
                    f"/opt/services/backup/hooks/{name} '{backup_dir}'"
                )
            elif location_type == "directory":
                new_location["from"] = location["path"]
            else:
                # fail loudly instead of emitting a location without "from"
                raise ValueError(f"unknown backup location type: {location_type!r}")
            result[name.lower()] = new_location
        return result

View file

@ -1,24 +0,0 @@
class FilterModule(object):
    """Filters that expand backup volume lists for the compose template."""

    def filters(self):
        return {
            'map_backup_volumes': self.map_backup_volumes,
            'map_backup_volumes_service': self.map_backup_volumes_service
        }

    def map_backup_volumes(self, volumes):
        """Return a compose ``volumes:`` mapping marking each volume external."""
        return {volume_name: {"external": True} for volume_name in volumes}

    def map_backup_volumes_service(self, volumes):
        """Return bind strings mounting each volume under /backup/volumes/."""
        return [
            "{0}:/backup/volumes/{0}".format(volume_name)
            for volume_name in volumes
        ]

View file

@ -1,3 +0,0 @@
# Restic image extended with curl (used for health-check pings).
FROM restic/restic
# --no-cache: fetch the index on the fly instead of storing it in the layer,
# keeping the image smaller and avoiding a stale cached index.
RUN apk add --no-cache curl

View file

@ -1,5 +1,6 @@
backup_path="$BACKUP_LOCATION/immich"
mkdir -p "$backup_path"
#!/usr/bin/env bash
backup_path="$1"
cd /opt/services/immich || exit
docker compose exec database sh -c 'pg_dump -U "$DB_USERNAME" "$DB_DATABASE"' | gzip >"$backup_path/immich.sql.gz"

View file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
# autorestic "before" hook for mailcow.
# $1 is the staging directory the backup role passes to every hook script;
# mailcow's helper script reads the target from MAILCOW_BACKUP_LOCATION.
export MAILCOW_BACKUP_LOCATION="$1"
/opt/mailcow-dockerized/helper-scripts/backup_and_restore.sh backup all

View file

@ -1,5 +1,6 @@
mkdir -p "$BACKUP_LOCATION/postgres"
cd "$BACKUP_LOCATION/postgres" || exit
#!/usr/bin/env bash
cd "$1"
postgres_tables=$(sudo -u postgres psql -Atc "SELECT datname FROM pg_database WHERE datistemplate = false;")

View file

@ -1,3 +0,0 @@
# Legacy backup.d script (pre-autorestic): mailcow dumps into its own
# subdirectory of $BACKUP_LOCATION, which the old backup.sh exports.
export MAILCOW_BACKUP_LOCATION="$BACKUP_LOCATION/mailcow"
mkdir -p "$MAILCOW_BACKUP_LOCATION"
/opt/mailcow-dockerized/helper-scripts/backup_and_restore.sh backup all

View file

@ -1,3 +0,0 @@
# Legacy backup.d script (pre-autorestic): mailcow dumps into its own
# subdirectory of $BACKUP_LOCATION, which the old backup.sh exports.
export MAILCOW_BACKUP_LOCATION="$BACKUP_LOCATION/mailcow"
mkdir -p "$MAILCOW_BACKUP_LOCATION"
/opt/mailcow-dockerized/helper-scripts/backup_and_restore.sh backup all

View file

@ -1,16 +0,0 @@
---
# Install the per-host backup.d scripts into the service directory.
- name: Set backup.d path
  ansible.builtin.set_fact:
    backup_d_path: "{{ (service_path, 'backup.d') | path_join }}"

- name: Create backup.d directory
  ansible.builtin.file:
    path: "{{ backup_d_path }}"
    state: directory
    mode: "0755"

# The fileglob is keyed on the target hostname, so each host only
# receives the scripts written for it.
- name: Copy the additional backup scripts
  ansible.builtin.copy:
    src: "{{ item }}"
    dest: "{{ backup_d_path }}"
    mode: "0755"
  with_fileglob:
    - "{{ ansible_facts.hostname }}/*"

View file

@ -1,12 +0,0 @@
---
# Ship the Dockerfile and flag a rebuild when it changed.
- name: Copy the Dockerfile
  ansible.builtin.copy:
    src: Dockerfile
    dest: "{{ (service_path, 'Dockerfile') | path_join }}"
    mode: "0644"
  register: cmd_result

# Rebuild the image only when the Dockerfile actually changed.
- name: Set the docker rebuild flag
  ansible.builtin.set_fact:
    docker_rebuild: true
  when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.

View file

@ -0,0 +1,23 @@
---
# Install the autorestic hook scripts and create a staging directory
# per hook for the data they dump before the backup runs.
- name: Set hooks path
  ansible.builtin.set_fact:
    hooks_path: "{{ (service_path, 'hooks') | path_join }}"

- name: Create hooks directory
  ansible.builtin.file:
    path: "{{ hooks_path }}"
    state: directory
    mode: "0755"

- name: Copy the hooks
  ansible.builtin.copy:
    src: "{{ item }}"
    dest: "{{ hooks_path }}"
    mode: "0755"
  with_fileglob:
    - "hooks/*"

# One staging directory under /opt/services/_backup per hook script,
# named after the script's basename.
- name: Create the from directories
  ansible.builtin.file:
    path: "{{ ('/opt/services/_backup', item | basename) | path_join }}"
    state: directory
    mode: "0755"
  with_fileglob:
    - "hooks/*"

View file

@ -4,36 +4,31 @@
- name: Deploy {{ svc.name }}
vars:
svc: "{{ backup_svc }}"
env: "{{ backup_env }}"
compose: "{{ backup_compose }}"
yml: "{{ backup_yml }}"
block:
- name: Import prepare tasks for common service
ansible.builtin.import_tasks: tasks/prepare-common-service.yml
- name: Copy the main backup script
- name: Template the main backup script
ansible.builtin.template:
src: "backup.sh.j2"
src: backup.sh.j2
dest: "{{ (service_path, 'backup.sh') | path_join }}"
mode: "0755"
- name: Import tasks specific to docker
ansible.builtin.import_tasks: docker.yml
- name: Import tasks specific to the backup.d scripts
ansible.builtin.import_tasks: backup.d.yml
- name: Template autorestic.yml
ansible.builtin.template:
src: yml.j2
dest: "{{ (service_path, '.autorestic.yml') | path_join }}"
mode: "0644"
- name: Import tasks specific to the hooks scripts
ansible.builtin.import_tasks: hooks.yml
- name: Import tasks specific to systemd
ansible.builtin.import_tasks: systemd.yml
- name: Build service
ansible.builtin.command:
cmd: docker compose build --pull
chdir: "{{ service_path }}"
register: cmd_result
when: docker_rebuild
changed_when: true
- name: Verify service
ansible.builtin.command:
cmd: docker compose run --rm app check
cmd: autorestic -v check
chdir: "{{ service_path }}"
changed_when: false
become: true

View file

@ -1,11 +1,11 @@
[Unit]
Description=Autostart several tools and services
Description=Run the backup script
StartLimitIntervalSec=7200
StartLimitBurst=5
[Service]
Type=oneshot
ExecStart={{ service_path }}/backup.sh
ExecStart={{ (service_path, 'backup.sh') | path_join }}
WorkingDirectory={{ service_path }}
Restart=on-failure
RestartSec=15min

74
roles/backup/templates/backup.sh.j2 Executable file → Normal file
View file

@ -1,68 +1,12 @@
#!/usr/bin/env bash
set -e
{{ backup_hc_command_start }}
set -a
. "{{ service_path }}/service.env"
set +a
duration_start=$(date +%s)
_duration_get () {
duration_end=$(date +%s)
echo "$((duration_end - duration_start))"
}
hc_url="https://hc-ping.com/$HC_UID"
uptime_kuma_url="https://status.serguzim.me/api/push/$UPTIME_KUMA_TOKEN"
_hc_ping () {
curl -fsSL --retry 3 "$hc_url$1" >/dev/null
}
_uptime_kuma_ping () {
duration=$(_duration_get)
curl -fsSL --retry 3 \
--url-query "status=$1" \
--url-query "msg=$2" \
--url-query "ping=${duration}000" \
"$uptime_kuma_url" >/dev/null
}
_fail () {
_hc_ping "/fail"
_uptime_kuma_ping "down" "$1"
rm -rf "$BACKUP_LOCATION"
exit 1
}
_success () {
_hc_ping
_uptime_kuma_ping "up" "backup successful"
}
_hc_ping "/start"
BACKUP_LOCATION="$(mktemp -d --suffix=-backup)"
export BACKUP_LOCATION
cd "$BACKUP_LOCATION" || _fail "failed to cd to $BACKUP_LOCATION"
shopt -s nullglob
for file in "{{ service_path }}/backup.d/"*
do
file_name="$(basename "$file")"
echo ""
echo "running $file_name"
time "$file" >"/tmp/$file_name.log" || _fail "error while running $file_name"
done || true
cd "{{ service_path }}"
docker compose run --rm -v "$BACKUP_LOCATION:/backup/misc" app backup /backup || _fail "error during restic backup"
_success
rm -rf "$BACKUP_LOCATION"
echo "forgetting old backups for {{ ansible_facts.hostname }}"
docker compose run --rm app forget --host "{{ ansible_facts.hostname }}" --prune \
--keep-last 7 \
--keep-daily 14 \
--keep-weekly 16 \
--keep-monthly 12 \
--keep-yearly 2
if autorestic backup -av --ci
then
{{ backup_hc_command_success }}
{{ backup_uk_command_success }}
else
{{ backup_hc_command_fail }}
{{ backup_uk_command_fail }}
fi

View file

@ -1,60 +1,46 @@
---
backup_image: "{{ (container_registry.public, 'services/backup') | path_join }}"
backup_svc:
name: backup
backup_volumes_list: "{{ all_services | my_service_attributes(inventory_hostname, 'volumes_backup') }}"
backup_volumes_service: "{{ backup_volumes_list | map_backup_volumes_service }}"
backup_list: "{{ all_services | my_service_attributes(inventory_hostname, 'backup') }}"
backup_env:
HC_UID: "{{ host_backup.hc_uid }}"
UPTIME_KUMA_TOKEN: "{{ host_backup.uptime_kuma_token }}"
backup_msg_start: "Backup started"
backup_msg_fail: "Backup failed"
backup_msg_fail_location: "Backup failed for location: "
backup_msg_success: "Backup successful"
RESTIC_REPOSITORY: "{{ vault_backup.restic.s3.repository }}"
RESTIC_PASSWORD: "{{ vault_backup.restic.s3.password }}"
backup_curl_base: 'curl -L -m 10 --retry 5'
backup_hc_curl_base: '{{ backup_curl_base }} -X POST -H "Content-Type: text/plain"'
backup_uk_curl_base: '{{ backup_curl_base }}'
backup_hc_url: 'https://hc-ping.com/{{ host_backup.hc_uid }}'
backup_uk_url: 'https://status.serguzim.me/api/push/{{ host_backup.uptime_kuma_token }}'
AWS_ACCESS_KEY_ID: "{{ vault_backup.restic.s3.access_key_id }}"
AWS_SECRET_ACCESS_KEY: "{{ vault_backup.restic.s3.secret_access_key }}"
backup_hc_command_start: '{{ backup_hc_curl_base }} --data "{{ backup_msg_start }}" {{ backup_hc_url }}/start'
backup_hc_command_success: '{{ backup_hc_curl_base }} --data "{{ backup_msg_success }}" {{ backup_hc_url }}'
backup_uk_command_success: '{{ backup_uk_curl_base }} "{{ backup_uk_url }}?status=up&msg={{ backup_msg_success | urlencode }}&ping="'
backup_hc_command_fail: '{{ backup_hc_curl_base }} --data "{{ backup_msg_fail }}" {{ backup_hc_url }}/fail'
backup_uk_command_fail: '{{ backup_uk_curl_base }} "{{ backup_uk_url }}?status=down&msg={{ backup_msg_fail | urlencode }}&ping="'
#RESTIC_S3_REPOSITORY: "{{ vault_backup.restic.s3.repository }}"
#RESTIC_S3_PASSWORD: "{{ vault_backup.restic.s3.password }}"
#RESTIC_S3_ACCESS_KEY_ID: "{{ vault_backup.restic.s3.access_key_id }}"
#RESTIC_S3_SECRET_ACCESS_KEY: "{{ vault_backup.restic.s3.secret_access_key }}"
backup_default_hooks:
failure:
- '{{ backup_hc_curl_base }} --data "{{ backup_msg_fail_location }}${AUTORESTIC_LOCATION}" {{ backup_hc_url }}/fail'
- '{{ backup_uk_curl_base }} "{{ backup_uk_url }}?status=down&msg={{ backup_msg_fail_location | urlencode }}${AUTORESTIC_LOCATION}&ping="'
#RESTIC_BORGBASE: "{{ vault_backup.restic.borgbase }}"
backup_yml:
version: 2
backup_compose:
watchtower: false
image: "{{ backup_image }}"
volumes: "{{ backup_volumes_service }}"
file:
services:
app:
build:
context: .
entrypoint:
- /usr/bin/restic
- --retry-lock=1m
restart: never
hostname: "{{ ansible_facts.hostname }}"
mount:
build:
context: .
image: "{{ backup_image }}"
restart: never
hostname: "{{ ansible_facts.hostname }}"
env_file:
- service.env
entrypoint:
- /usr/bin/restic
- --retry-lock=1m
command:
- mount
- /mnt
privileged: true
devices:
- /dev/fuse
backends: "{{ vault_backup.locations }}"
volumes: "{{ backup_volumes_list | map_backup_volumes }}"
locations: "{{ backup_list | map_backup_locations(vault_backup.locations, backup_default_hooks ) }}"
global:
forget:
keep-last: 7
keep-daily: 14
keep-weekly: 16
keep-monthly: 12
keep-yearly: 2
host: "{{ ansible_facts.hostname }}"
backup:
host: "{{ ansible_facts.hostname }}"

View file