Migrate services part
This commit is contained in:
parent
7c59e4ae57
commit
73bce8f6e5
157 changed files with 3883 additions and 9 deletions
roles/backup
files
tasks
templates
vars
3
roles/backup/files/Dockerfile
Normal file
3
roles/backup/files/Dockerfile
Normal file
|
@ -0,0 +1,3 @@
|
|||
# Backup image: upstream restic with curl added.
FROM restic/restic

# --no-cache avoids leaving the apk package index in the image layer.
# NOTE(review): curl is not referenced by the compose entrypoint in this role —
# presumably used for in-container health pings; confirm it is actually needed.
RUN apk add --no-cache curl
|
4
roles/backup/files/backup.timer
Normal file
4
roles/backup/files/backup.timer
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Fires backup.service daily at 04:10 (installed by roles/backup/tasks/systemd.yml).
# NOTE(review): no Persistent=true — a run missed while the host is down is
# skipped until the next day; confirm that is intended.
[Timer]
OnCalendar=*-*-* 04:10:00

[Install]
WantedBy=timers.target
|
3
roles/backup/files/node001/mailcow.sh
Executable file
3
roles/backup/files/node001/mailcow.sh
Executable file
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env sh
# Dump a full mailcow backup into $BACKUP_LOCATION/mailcow.
# BACKUP_LOCATION is exported by the calling backup.sh (see backup.sh.j2).
# Fix: the file is executed directly but had no shebang, so the interpreter
# depended on the kernel/shell execve fallback.

export MAILCOW_BACKUP_LOCATION="$BACKUP_LOCATION/mailcow"
mkdir -p "$MAILCOW_BACKUP_LOCATION"

# mailcow's own helper performs the actual dump ("backup all").
/opt/mailcow-dockerized/helper-scripts/backup_and_restore.sh backup all
|
5
roles/backup/files/node002/immich.sh
Executable file
5
roles/backup/files/node002/immich.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env sh
# Dump the immich PostgreSQL database (gzipped) into $BACKUP_LOCATION/immich.
# BACKUP_LOCATION is exported by the calling backup.sh.
# Fix: added the missing shebang for a directly-executed script.

backup_path="$BACKUP_LOCATION/immich"
mkdir -p "$backup_path"

# Abort with cd's non-zero status if the compose project directory is missing.
cd /opt/services/immich || exit

# DB_USERNAME/DB_DATABASE are resolved from the database container's own env
# (single quotes keep the expansion inside the container).
docker compose exec database sh -c 'pg_dump -U "$DB_USERNAME" "$DB_DATABASE"' | gzip >"$backup_path/immich.sql.gz"
|
14
roles/backup/files/node002/postgres.sh
Executable file
14
roles/backup/files/node002/postgres.sh
Executable file
|
@ -0,0 +1,14 @@
|
|||
#!/usr/bin/env sh
# Dump every non-template PostgreSQL database, plus a pg_dumpall, into
# $BACKUP_LOCATION/postgres. BACKUP_LOCATION is exported by backup.sh.
# Fixes: added the missing shebang; renamed the misleading local variable
# (`postgres_tables` actually held database names from pg_database).

mkdir -p "$BACKUP_LOCATION/postgres"
cd "$BACKUP_LOCATION/postgres" || exit

# All non-template databases, one name per line.
databases=$(sudo -u postgres psql -Atc "SELECT datname FROM pg_database WHERE datistemplate = false;")

# Relies on word splitting; database names with whitespace would break —
# acceptable here since names come from a controlled server.
for db in $databases
do
	printf "dumping %s ..." "$db"
	sudo -u postgres pg_dump "$db" | gzip >"pg_dump_$db.sql.gz"
	echo " done"
done

# Full-cluster dump (globals/roles included) as a safety net.
echo "dumping all"
sudo -u postgres pg_dumpall | gzip >"pg_dumpall.sql.gz"
|
3
roles/backup/files/node003/mailcow.sh
Executable file
3
roles/backup/files/node003/mailcow.sh
Executable file
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env sh
# Dump a full mailcow backup into $BACKUP_LOCATION/mailcow.
# BACKUP_LOCATION is exported by the calling backup.sh (see backup.sh.j2).
# Fix: the file is executed directly but had no shebang, so the interpreter
# depended on the kernel/shell execve fallback.

export MAILCOW_BACKUP_LOCATION="$BACKUP_LOCATION/mailcow"
mkdir -p "$MAILCOW_BACKUP_LOCATION"

# mailcow's own helper performs the actual dump ("backup all").
/opt/mailcow-dockerized/helper-scripts/backup_and_restore.sh backup all
|
16
roles/backup/tasks/backup.d.yml
Normal file
16
roles/backup/tasks/backup.d.yml
Normal file
|
@ -0,0 +1,16 @@
|
|||
---
# Installs the per-host auxiliary backup scripts into <service_path>/backup.d/,
# where backup.sh.j2 iterates over and executes them.

- name: Set backup.d path
  ansible.builtin.set_fact:
    backup_d_path: "{{ (service_path, 'backup.d') | path_join }}"

- name: Create backup.d directory
  ansible.builtin.file:
    path: "{{ backup_d_path }}"
    state: directory
    mode: "0755"

# Only scripts under files/<hostname>/ are deployed, so each host gets
# exactly its own set (node001, node002, node003 above).
- name: Copy the additional backup scripts
  ansible.builtin.copy:
    src: "{{ item }}"
    dest: "{{ backup_d_path }}"
    mode: "0755"
  with_fileglob:
    - "{{ ansible_facts.hostname }}/*"
|
12
roles/backup/tasks/docker.yml
Normal file
12
roles/backup/tasks/docker.yml
Normal file
|
@ -0,0 +1,12 @@
|
|||
---
# Deploys the role's Dockerfile and records whether the image must be rebuilt
# (consumed by the "Build service" task in main.yml).

- name: Copy the Dockerfile
  ansible.builtin.copy:
    src: Dockerfile
    dest: "{{ (service_path, 'Dockerfile') | path_join }}"
    mode: "0644"
  register: cmd_result

# A fact is used instead of a handler so the rebuild can happen per service.
- name: Set the docker rebuild flag
  ansible.builtin.set_fact:
    docker_rebuild: true
  when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.
|
39
roles/backup/tasks/main.yml
Normal file
39
roles/backup/tasks/main.yml
Normal file
|
@ -0,0 +1,39 @@
|
|||
---
# Entry point of the backup role: prepares the common service scaffolding,
# deploys the backup driver, Dockerfile, backup.d scripts and systemd units,
# then (re)builds and smoke-tests the restic container.

- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ svc.name }}
  vars:
    svc: "{{ backup_svc }}"
    env: "{{ backup_env }}"
    compose: "{{ backup_compose }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    # The driver executed by backup.service (templated with service_path etc.).
    - name: Copy the main backup script
      ansible.builtin.template:
        src: "backup.sh.j2"
        dest: "{{ (service_path, 'backup.sh') | path_join }}"
        mode: "0755"

    - name: Import tasks specific to docker
      ansible.builtin.import_tasks: docker.yml
    - name: Import tasks specific to the backup.d scripts
      ansible.builtin.import_tasks: backup.d.yml
    - name: Import tasks specific to systemd
      ansible.builtin.import_tasks: systemd.yml

    # NOTE(review): docker_rebuild is only set in docker.yml when the
    # Dockerfile changed — confirm a default exists (e.g. in
    # set-default-facts.yml), otherwise this condition errors when unchanged.
    - name: Build service
      ansible.builtin.command:
        cmd: docker compose build --pull
        chdir: "{{ service_path }}"
      register: cmd_result
      when: docker_rebuild
      changed_when: true

    # "app check" verifies the restic repository is reachable and consistent.
    - name: Verify service
      ansible.builtin.command:
        cmd: docker compose run --rm app check
        chdir: "{{ service_path }}"
      changed_when: false
|
20
roles/backup/tasks/systemd.yml
Normal file
20
roles/backup/tasks/systemd.yml
Normal file
|
@ -0,0 +1,20 @@
|
|||
---
# Installs and enables the systemd units that schedule backup.sh:
# backup.service (templated) and backup.timer (static, daily at 04:10).

- name: Copy the system service
  ansible.builtin.template:
    src: backup.service.j2
    dest: /etc/systemd/system/backup.service
    mode: "0644"
  become: true

- name: Copy the system timer
  ansible.builtin.copy:
    src: backup.timer
    dest: /etc/systemd/system/backup.timer
    mode: "0644"
  become: true

# Only the timer is enabled/started; the service is triggered by it.
- name: Enable the system timer
  ansible.builtin.systemd_service:
    name: backup.timer
    state: started
    enabled: true
    daemon_reload: true
  become: true
|
11
roles/backup/templates/backup.service.j2
Normal file
11
roles/backup/templates/backup.service.j2
Normal file
|
@ -0,0 +1,11 @@
|
|||
[Unit]
# Oneshot unit triggered by backup.timer; runs the templated backup driver.
# Fix: the Description was copy-pasted from an autostart unit.
Description=Run the backup script
StartLimitIntervalSec=7200
StartLimitBurst=5

[Service]
Type=oneshot
ExecStart={{ service_path }}/backup.sh
WorkingDirectory={{ service_path }}
# Fix: Restart=on-failure / RestartSec were removed — systemd refuses to load
# Type=oneshot units with any Restart= other than "no". Retries are left to
# backup.timer and the Healthchecks/Uptime-Kuma alerts inside backup.sh.
|
68
roles/backup/templates/backup.sh.j2
Executable file
68
roles/backup/templates/backup.sh.j2
Executable file
|
@ -0,0 +1,68 @@
|
|||
#!/usr/bin/env bash
# Main backup driver (templated by Ansible). Runs every script in backup.d/
# into a scratch directory, ships the results to restic via docker compose,
# and reports start/success/failure to Healthchecks and Uptime-Kuma.

set -e

# Export every variable sourced from service.env (HC_UID, UPTIME_KUMA_TOKEN, ...).
set -a
. "{{ service_path }}/service.env"
set +a

duration_start=$(date +%s)
# Prints seconds elapsed since script start (reported as the Uptime-Kuma ping).
_duration_get () {
	duration_end=$(date +%s)
	echo "$((duration_end - duration_start))"
}

hc_url="https://hc-ping.com/$HC_UID"
uptime_kuma_url="https://status.serguzim.me/api/push/$UPTIME_KUMA_TOKEN"
# Ping Healthchecks; $1 is an optional URL suffix ("/start", "/fail" or empty).
_hc_ping () {
	curl -fsSL --retry 3 "$hc_url$1" >/dev/null
}
# Push status to Uptime-Kuma; $1 = "up"/"down", $2 = human-readable message.
# The ping value is the elapsed time in milliseconds.
_uptime_kuma_ping () {
	duration=$(_duration_get)
	curl -fsSL --retry 3 \
		--url-query "status=$1" \
		--url-query "msg=$2" \
		--url-query "ping=${duration}000" \
		"$uptime_kuma_url" >/dev/null
}

# Report failure ($1 = message), remove the scratch directory and abort.
_fail () {
	_hc_ping "/fail"
	_uptime_kuma_ping "down" "$1"
	rm -rf "$BACKUP_LOCATION"
	exit 1
}
_success () {
	_hc_ping
	_uptime_kuma_ping "up" "backup successful"
}

_hc_ping "/start"

# Scratch directory every backup.d script dumps into (exported for them).
BACKUP_LOCATION="$(mktemp -d --suffix=-backup)"
export BACKUP_LOCATION
cd "$BACKUP_LOCATION" || _fail "failed to cd to $BACKUP_LOCATION"

# nullglob: an empty backup.d/ yields zero iterations instead of a literal "*".
shopt -s nullglob
for file in "{{ service_path }}/backup.d/"*
do
	file_name="$(basename "$file")"
	echo ""
	echo "running $file_name"
	# Each script's stdout goes to its own log; any failure aborts the backup.
	time "$file" >"/tmp/$file_name.log" || _fail "error while running $file_name"
done || true # NOTE(review): failures already _fail above; "|| true" looks like a set -e guard — confirm intent

cd "{{ service_path }}"
# Mount the scratch dir into the restic container and back it up.
docker compose run --rm -v "$BACKUP_LOCATION:/backup/misc" app backup /backup || _fail "error during restic backup"

_success

rm -rf "$BACKUP_LOCATION"

# Prune old snapshots for this host only, per the retention policy below.
echo "forgetting old backups for {{ ansible_facts.hostname }}"
docker compose run --rm app forget --host "{{ ansible_facts.hostname }}" --prune \
	--keep-last 7 \
	--keep-daily 14 \
	--keep-weekly 16 \
	--keep-monthly 12 \
	--keep-yearly 2
|
59
roles/backup/vars/main.yml
Normal file
59
roles/backup/vars/main.yml
Normal file
|
@ -0,0 +1,59 @@
|
|||
---
# Variables for the backup role: restic image, service definition, environment
# (monitoring tokens + restic/S3 credentials from vault) and the compose spec.

backup_image: registry.serguzim.me/services/backup

backup_svc:
  name: backup

backup_volumes_service: "{{ host_backup.volumes | map_backup_volumes_service }}"

backup_env:
  HC_UID: "{{ host_backup.hc_uid }}"
  UPTIME_KUMA_TOKEN: "{{ host_backup.uptime_kuma_token }}"

  RESTIC_REPOSITORY: "{{ vault_backup.restic.s3.repository }}"
  RESTIC_PASSWORD: "{{ vault_backup.restic.s3.password }}"

  AWS_ACCESS_KEY_ID: "{{ vault_backup.restic.s3.access_key_id }}"
  AWS_SECRET_ACCESS_KEY: "{{ vault_backup.restic.s3.secret_access_key }}"

  # Kept for a planned multi-repository setup (typo "RESITC" fixed to "RESTIC"):
  # RESTIC_S3_REPOSITORY: "{{ vault_backup.restic.s3.repository }}"
  # RESTIC_S3_PASSWORD: "{{ vault_backup.restic.s3.password }}"
  # RESTIC_S3_ACCESS_KEY_ID: "{{ vault_backup.restic.s3.access_key_id }}"
  # RESTIC_S3_SECRET_ACCESS_KEY: "{{ vault_backup.restic.s3.secret_access_key }}"

  # RESTIC_BORGBASE: "{{ vault_backup.restic.borgbase }}"

backup_compose:
  watchtower: false
  image: "{{ backup_image }}"
  volumes: "{{ backup_volumes_service }}"
  file:
    services:
      # "app" runs the actual backup/forget/check restic invocations.
      app:
        build:
          context: .
        entrypoint:
          - /usr/bin/restic
          - --retry-lock=1m
        # NOTE(review): the compose spec documents "no"/always/on-failure/
        # unless-stopped for restart — confirm the consumer accepts "never".
        restart: never
        hostname: "{{ ansible_facts.hostname }}"
      # "mount" exposes the restic repository as a FUSE filesystem under /mnt
      # for manual restores (hence privileged + /dev/fuse).
      mount:
        build:
          context: .
        image: "{{ backup_image }}"
        restart: never
        hostname: "{{ ansible_facts.hostname }}"
        env_file:
          - service.env
        entrypoint:
          - /usr/bin/restic
          - --retry-lock=1m
        command:
          - mount
          - /mnt
        privileged: true
        devices:
          - /dev/fuse

    # Compose-file-level volume definitions backing the service mounts above.
    # NOTE(review): indentation reconstructed — confirm this key belongs under
    # "file:" (compose top-level volumes), not under backup_compose directly.
    volumes: "{{ host_backup.volumes | map_backup_volumes }}"
|
Loading…
Add table
Add a link
Reference in a new issue