Compare commits

5 commits: f8c478b2e6 ... 03f83ef7fd

Commits (SHA1):
03f83ef7fd
c95be92f46
b6297638db
40742e3214
519882db43

130 changed files with 414 additions and 150 deletions

.gitignore (vendored): 48 changes

@@ -1,47 +1 @@
-*.secret.env
-.lego/
-
-# services already handled by ansible
-/acme-dns/
-/authentik/
-/coder/
-/forgejo/
-/forgejo-runner/
-/gitea/
-/gitea-runner/
-/harbor/
-/healthcheck/
-/homebox/
-/influxdb/
-/jellyfin/
-/minecraft-2/
-/minio/
-/synapse/
-/tandoor/
-/telegraf/
-/tinytinyrss/
-/umami/
-/uptime-kuma/
-/watchtower/
-/webdis/
-/wiki-js/
-/woodpecker/
-
-/caddy/config/conf.002.d/acme.serguzim.me.conf
-/caddy/config/conf.002.d/auth.serguzim.me.conf
-/caddy/config/conf.002.d/analytics.serguzim.me.conf
-/caddy/config/conf.002.d/ci.serguzim.me.conf
-/caddy/config/conf.002.d/coder.serguzim.me.conf
-/caddy/config/conf.002.d/faas.serguzim.me.conf
-/caddy/config/conf.002.d/git.serguzim.me.conf
-/caddy/config/conf.002.d/inventory.serguzim.me.conf
-/caddy/config/conf.002.d/matrix.msrg.cc.conf
-/caddy/config/conf.002.d/media.serguzim.me.conf
-/caddy/config/conf.002.d/recipes.serguzim.me.conf
-/caddy/config/conf.002.d/registry.serguzim.me.conf
-/caddy/config/conf.002.d/rss.serguzim.me.conf
-/caddy/config/conf.002.d/status.serguzim.me.conf
-/caddy/config/conf.002.d/tick.serguzim.me.conf
-/caddy/config/conf.002.d/webdis.huck.serguzim.me.conf
-/caddy/config/conf.002.d/wiki.serguzim.me.conf
-inventory/group_vars/all/serguzim.net.yml

@@ -4,4 +4,4 @@ repos:
     hooks:
       - id: ansible-lint
         args:
-          - _ansible/serguzim.net.yml
+          - serguzim.net.yml

_ansible/.gitignore (vendored): 2 changes

@@ -1,2 +0,0 @@
-.vault_pass
-inventory/group_vars/all/serguzim.net.yml

@@ -1,2 +0,0 @@
-ansible_port: "{{ vault_node002.ansible_port }}"
-ansible_user: "{{ vault_node002.ansible_user }}"

@@ -1,6 +1,6 @@
 #!/usr/bin/env sh
 
-LEGO_WORKING_PATH="/opt/services/.lego/"
+export LEGO_WORKING_PATH="/opt/lego/"
 
 set -e
 

@@ -1,8 +0,0 @@
-HC_UID=
-UPTIME_KUMA_TOKEN=
-
-RESTIC_REPOSITORY=
-RESTIC_PASSWORD=
-
-AWS_ACCESS_KEY_ID=
-AWS_SECRET_ACCESS_KEY=

backup/.gitignore (vendored): 2 changes

@@ -1,2 +0,0 @@
-/mailcheck.mail
-/msmtprc

@@ -1,4 +0,0 @@
-[Service]
-Type=oneshot
-ExecStart=/opt/services/backup/backup.sh
-WorkingDirectory=/opt/services/backup/

@@ -1,65 +0,0 @@
-#!/usr/bin/env sh
-
-set -e
-
-set -a
-. "/opt/services/backup/.env"
-. "/opt/services/backup/.secret.env"
-set +a
-
-hc_url="https://hc-ping.com/$HC_UID"
-
-curl -fsSL --retry 3 "$hc_url/start" >/dev/null
-duration_start=$(date +%s)
-
-BACKUP_LOCATION="/tmp/backup-misc"
-
-rm -rf "$BACKUP_LOCATION"
-mkdir -p "$BACKUP_LOCATION"
-cd "$BACKUP_LOCATION" || exit
-
-_hc_fail () {
-    curl -fsSL --retry 3 "$hc_url/fail"
-    exit 1
-}
-
-_backup_prepare_postgres () {
-    mkdir -p "$BACKUP_LOCATION/postgres"
-    cd "$BACKUP_LOCATION/postgres" || exit
-
-    postgres_tables=$(sudo -u postgres psql -Atc "SELECT datname FROM pg_database WHERE datistemplate = false;")
-
-    for i in $postgres_tables
-    do
-        echo "dumping $i"
-        sudo -u postgres pg_dump "$i" | gzip >"pg_dump_$i.gz"
-        echo "done with $i"
-        echo ""
-    done
-
-    echo "dumping all"
-    sudo -u postgres pg_dumpall | gzip >"pg_dumpall.gz"
-}
-
-_backup_prepare_mailcow () {
-    export MAILCOW_BACKUP_LOCATION="$BACKUP_LOCATION/mailcow"
-    mkdir -p "$MAILCOW_BACKUP_LOCATION"
-    /opt/mailcow-dockerized/helper-scripts/backup_and_restore.sh \
-        backup all --delete-days 3
-}
-
-echo ""
-echo "preparing postgres"
-time _backup_prepare_postgres >/tmp/backup-postgres.log || _hc_fail
-echo ""
-echo "preparing mailcow"
-time _backup_prepare_mailcow >/tmp/backup-mailcow.log || _hc_fail
-
-cd /opt/services/backup/
-docker compose run --rm backup || _hc_fail
-
-duration_end=$(date +%s)
-curl -fsSL --retry 3 "$hc_url"
-
-duration=$((duration_end - duration_start))
-curl -fsSL --retry 3 "https://status.serguzim.me/api/push/$UPTIME_KUMA_TOKEN?status=up&msg=OK&ping=${duration}000"

filter_plugins/map_backup_volumes.py (Normal file): 24 changes

@@ -0,0 +1,24 @@
+class FilterModule(object):
+    def filters(self):
+        return {
+            'map_backup_volumes': self.map_backup_volumes,
+            'map_backup_volumes_service': self.map_backup_volumes_service
+        }
+
+    def map_backup_volumes(self, volumes):
+        result = {}
+
+        for volume in volumes:
+            result[volume] = {
+                "external": True,
+            }
+
+        return result
+
+    def map_backup_volumes_service(self, volumes):
+        result = []
+
+        for volume in volumes:
+            result.append("{volume_name}:/backup/volumes/{volume_name}".format(volume_name=volume))
+
+        return result

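Not part of the diff: a quick sketch of what these two filters evaluate to, using a volume list like the one node002 declares further down. FilterModule is the class added above.

    fm = FilterModule()

    print(fm.map_backup_volumes(["gitea_data", "influxdb_data"]))
    # {'gitea_data': {'external': True}, 'influxdb_data': {'external': True}}
    # -> compose treats each volume as pre-existing instead of creating it

    print(fm.map_backup_volumes_service(["gitea_data", "influxdb_data"]))
    # ['gitea_data:/backup/volumes/gitea_data', 'influxdb_data:/backup/volumes/influxdb_data']
    # -> mount list for the backup container, one path per volume under /backup/volumes/
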
@@ -9,6 +9,9 @@ mailer:
   host: mail.serguzim.me
   port: 587
 
+acme_dns:
+  host: acme.serguzim.me
+
 
 services_path: /opt/services/
 caddy_path: "{{ (services_path, 'caddy') | path_join }}"

inventory/host_vars/node002/main.yml (Normal file): 13 changes

@@ -0,0 +1,13 @@
+ansible_port: "{{ vault_node002.ansible_port }}"
+ansible_user: "{{ vault_node002.ansible_user }}"
+host_backup:
+  backup:
+    hc_uid: "{{ vault_node002.backup.hc_uid }}"
+    uptime_kuma_token: "{{ vault_node002.backup.uptime_kuma_token }}"
+    volumes:
+      - gitea_data
+      - influxdb_data
+      - reitanlage-oranienburg_data
+      - synapse_media_store
+      - tandoor_mediafiles
+

@@ -1,8 +1,9 @@
 ---
 - name: Run roles for local-dev
   vars:
-    # Remove _ansible and inventory
-    services_path: "{{ inventory_dir.split('/')[0:-2] | join('/') }}"
+    # Remove inventory
+    base_path: "{{ inventory_dir.split('/')[0:-1] | join('/') }}"
+    services_path: "{{ (base_path, '_services') | path_join }}"
     caddy_config_path: "{{ (services_path, 'caddy', 'config', 'conf.d') | path_join }}"
 
   hosts: local-dev

@@ -4,6 +4,8 @@
   roles:
     - role: common
       tags: [always]
+    - role: backup
+      tags: [backup]
     - role: caddy
       tags: [caddy, reverse-proxy, webserver]
       vars:

@@ -36,10 +38,14 @@
       tags: [influxdb, sensors, monitoring]
     - role: jellyfin
       tags: [jellyfin, media]
+    - role: reitanlage_oranienburg
+      tags: [reitanlage-oranienburg, website]
     - role: synapse
       tags: [synapse, matrix, communication]
     - role: tandoor
       tags: [tandoor, recipes]
+    - role: teamspeak_fallback
+      tags: [teamspeak-fallback, communication]
     - role: telegraf
       tags: [telegraf, monitoring]
     - role: tinytinyrss

@@ -1,6 +1,6 @@
 ---
 acme_dns_svc:
-  domain: acme.serguzim.me
+  domain: "{{ acme_dns.host }}"
   name: acme-dns
   port: 80
   nsadmin: "{{ admin_email | regex_replace('@', '.') }}"

@@ -7,8 +7,7 @@ services:
     image: backup
     restart: never
     env_file:
-      - .env
-      - .secret.env
+      - service.env
     volumes:
       - /tmp/backup-misc:/backup/misc
       - gitea_data:/backup/volumes/gitea_data

roles/backup/files/node001/mailcow.sh (Executable file): 3 changes

@@ -0,0 +1,3 @@
+export MAILCOW_BACKUP_LOCATION="$BACKUP_LOCATION/mailcow"
+mkdir -p "$MAILCOW_BACKUP_LOCATION"
+/opt/mailcow-dockerized/helper-scripts/backup_and_restore.sh backup all

roles/backup/files/node002/postgres.sh (Executable file): 14 changes

@@ -0,0 +1,14 @@
+mkdir -p "$BACKUP_LOCATION/postgres"
+cd "$BACKUP_LOCATION/postgres" || exit
+
+postgres_tables=$(sudo -u postgres psql -Atc "SELECT datname FROM pg_database WHERE datistemplate = false;")
+
+for i in $postgres_tables
+do
+    printf "dumping %s ..." "$i"
+    sudo -u postgres pg_dump "$i" | gzip >"pg_dump_$i.gz"
+    echo " done"
+done
+
+echo "dumping all"
+sudo -u postgres pg_dumpall | gzip >"pg_dumpall.gz"

roles/backup/tasks/main.yml (Normal file): 84 changes

@@ -0,0 +1,84 @@
+---
+- name: Set common facts
+  ansible.builtin.import_tasks: tasks/set-default-facts.yml
+
+- name: Deploy {{ svc.name }}
+  vars:
+    svc: "{{ backup_svc }}"
+    env: "{{ backup_env }}"
+    compose: "{{ backup_compose }}"
+  block:
+    - name: Import prepare tasks for common service
+      ansible.builtin.import_tasks: tasks/prepare-common-service.yml
+
+    - name: Copy the Dockerfile
+      ansible.builtin.copy:
+        src: Dockerfile
+        dest: "{{ (service_path, 'Dockerfile') | path_join }}"
+        mode: "0644"
+      register: cmd_result
+
+    - name: Set the docker rebuild flag
+      ansible.builtin.set_fact:
+        docker_rebuild: true
+      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.
+
+    - name: Set backup.d path
+      ansible.builtin.set_fact:
+        backup_d_path: "{{ (service_path, 'backup.d') | path_join }}"
+
+    - name: Create backup.d directory
+      ansible.builtin.file:
+        path: "{{ backup_d_path }}"
+        state: directory
+        mode: "0755"
+
+    - name: Copy the additional backup scripts
+      ansible.builtin.copy:
+        src: "{{ item }}"
+        dest: "{{ backup_d_path }}"
+        mode: "0755"
+      with_fileglob:
+        - "{{ ansible_facts.hostname }}/*"
+
+    - name: Copy the main backup scripts
+      ansible.builtin.template:
+        src: "backup.sh.j2"
+        dest: "{{ (service_path, 'backup.sh') | path_join }}"
+        mode: "0755"
+
+    - name: Copy the system service
+      ansible.builtin.template:
+        src: backup.service.j2
+        dest: /etc/systemd/system/backup.service
+        mode: "0644"
+      become: true
+    - name: Copy the system timer
+      ansible.builtin.copy:
+        src: backup.timer
+        dest: /etc/systemd/system/backup.timer
+        mode: "0644"
+      become: true
+    - name: Enable the system timer
+      ansible.builtin.systemd_service:
+        name: backup.timer
+        state: started
+        enabled: true
+        daemon_reload: true
+      become: true
+
+    - name: Import tasks create a service.env file
+      ansible.builtin.import_tasks: tasks/steps/template-service-env.yml
+
+    - name: Build service
+      ansible.builtin.command:
+        cmd: docker compose build --pull
+        chdir: "{{ service_path }}"
+      register: cmd_result
+      changed_when: true # TODO
+
+    - name: Verify service
+      ansible.builtin.command:
+        cmd: docker compose run --rm app check
+        chdir: "{{ service_path }}"
+      changed_when: false

roles/backup/templates/backup.service.j2 (Normal file): 4 changes

@@ -0,0 +1,4 @@
+[Service]
+Type=oneshot
+ExecStart={{ service_path }}/backup.sh
+WorkingDirectory={{ service_path }}

roles/backup/templates/backup.sh.j2 (Executable file): 50 changes

@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+
+set -e
+
+set -a
+. "{{ service_path }}/service.env"
+set +a
+
+hc_url="https://hc-ping.com/$HC_UID"
+_hc_ping () {
+    curl -fsSL --retry 3 "$hc_url$1" >/dev/null
+}
+
+_hc_fail () {
+    _hc_ping "/fail"
+    exit 1
+}
+
+_hc_ping "/start"
+duration_start=$(date +%s)
+
+BACKUP_LOCATION="$(mktemp -d)"
+export BACKUP_LOCATION
+cd "$BACKUP_LOCATION" || exit
+
+shopt -s nullglob
+for file in "{{ service_path }}/backup.d/"*
+do
+    echo ""
+    echo "running $file"
+    time "$file" || _hc_fail
+done || true
+
+cd "{{ service_path }}"
+docker compose run --rm -v "$BACKUP_LOCATION:/backup/misc" app backup /backup || _hc_fail
+
+duration_end=$(date +%s)
+_hc_ping
+
+duration=$((duration_end - duration_start))
+curl -fsSL --retry 3 "https://status.serguzim.me/api/push/$UPTIME_KUMA_TOKEN?status=up&msg=OK&ping=${duration}000"
+
+rm -rf "$BACKUP_LOCATION"
+
+echo "forgetting old backups for $(hostname)"
+docker compose run --rm app forget --host "$(hostname)" --prune \
+    --keep-daily 14 \
+    --keep-weekly 28 \
+    --keep-monthly 12 \
+    --keep-yearly 2

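An aside, not from the diff: the template wraps the whole run in healthchecks.io pings (/start, /fail, plain ping on success) and reports the wall-clock duration to Uptime Kuma; ${duration}000 turns seconds into milliseconds by appending zeros. A minimal Python sketch of the same lifecycle, assuming the standard hc-ping.com endpoints; run_backup_scripts and both credential values are hypothetical stand-ins:

    import time
    import urllib.request

    HC_UID = "uuid-from-service.env"               # hypothetical stand-in
    UPTIME_KUMA_TOKEN = "token-from-service.env"   # hypothetical stand-in

    def hc_ping(suffix=""):
        # hc-ping.com takes the bare check UUID, plus optional /start or /fail
        urllib.request.urlopen(f"https://hc-ping.com/{HC_UID}{suffix}")

    hc_ping("/start")
    start = time.time()
    try:
        run_backup_scripts()  # hypothetical stand-in for the backup.d loop
    except Exception:
        hc_ping("/fail")
        raise
    hc_ping()  # bare ping signals success
    duration_ms = int(time.time() - start) * 1000  # the shell appends "000" instead
    urllib.request.urlopen(
        "https://status.serguzim.me/api/push/"
        f"{UPTIME_KUMA_TOKEN}?status=up&msg=OK&ping={duration_ms}"
    )
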
roles/backup/vars/main.yml (Normal file): 28 changes

@@ -0,0 +1,28 @@
+---
+backup_svc:
+  name: backup
+
+backup_volumes_service: "{{ host_backup.backup.volumes | map_backup_volumes_service }}"
+
+backup_env:
+  HC_UID: "{{ host_backup.backup.hc_uid }}"
+  UPTIME_KUMA_TOKEN: "{{ host_backup.backup.uptime_kuma_token }}"
+
+  RESTIC_REPOSITORY: "{{ vault_backup.restic.repository }}"
+  RESTIC_PASSWORD: "{{ vault_backup.restic.password }}"
+
+  AWS_ACCESS_KEY_ID: "{{ vault_backup.aws_secret.key_id }}"
+  AWS_SECRET_ACCESS_KEY: "{{ vault_backup.aws_secret.access_key }}"
+
+backup_compose:
+  watchtower: false
+  image: backup
+  volumes: "{{ backup_volumes_service }}"
+  file:
+    services:
+      app:
+        build:
+          context: .
+        restart: never
+        hostname: "{{ ansible_facts.hostname }}"
+    volumes: "{{ host_backup.backup.volumes | map_backup_volumes }}"

@@ -2,7 +2,7 @@
 caddy_acmedns_user: "{{ vault_caddy.acmedns.user }}"
 caddy_acmedns_pass: "{{ vault_caddy.acmedns.pass }}"
 caddy_acmedns_subd: "{{ vault_caddy.acmedns.subd }}"
-caddy_acmedns_url: "https://acme.serguzim.me"
+caddy_acmedns_url: "https://{{ acme_dns.host }}"
 
 caddy_ports_default:
   - 80:80

@@ -16,7 +16,7 @@
 
 - name: Template config
   ansible.builtin.template:
-    src: service.yml.j2
+    src: yml.j2
     dest: "{{ (service_path, 'harbor.yml') | path_join }}"
     mode: "0644"
 

@@ -36,8 +36,8 @@ harbor_yml:
     port: "{{ harbor_port_http }}"
   https:
     port: "{{ harbor_port_https }}"
-    certificate: /opt/services/.lego/certificates/registry.serguzim.me.crt
-    private_key: /opt/services/.lego/certificates/registry.serguzim.me.key
+    certificate: /opt/services/.lego/certificates/registry.serguzim.me.crt # TODO
+    private_key: /opt/services/.lego/certificates/registry.serguzim.me.key # TODO
   external_url: https://registry.serguzim.me
   harbor_admin_password: "{{ vault_harbor.admin_password }}"
   data_volume: "{{ (service_path, 'data') | path_join }}"

@@ -14,7 +14,7 @@ check_url ()
     printf "checking url %s ." "$url"
     dig A "$1" >/dev/null
     printf "."
-    result=$(curl -LsSfv "$url" 2>&1)
+    result=$(curl -LsSfv --retry 3 "$url" 2>&1)
     code="$?"
     printf ".\n"
     #shellcheck disable=SC2181

@@ -1,4 +1,4 @@
 [Service]
 Type=oneshot
 ExecStart=/usr/bin/docker compose run --rm %i
-WorkingDirectory=/opt/services/healthcheck/
+WorkingDirectory={{ service_path }}

@@ -39,6 +39,14 @@
     dest: /etc/systemd/system/healthcheck@.timer
     mode: "0644"
   become: true
+- name: Enable the system timer
+  ansible.builtin.systemd_service:
+    name: healthcheck@{{ item }}.timer
+    state: started
+    enabled: true
+    daemon_reload: true
+  loop: "{{ healthcheck_svc.checks }}"
+  become: true
 
 - name: Import tasks create a service.env file
   ansible.builtin.import_tasks: tasks/steps/template-service-env.yml

@@ -1,6 +1,10 @@
 ---
 healthcheck_svc:
   name: healthcheck
+  checks:
+    - http
+    - mail
+    - matrix
 
 healthcheck_env:
   USER_AGENT: healthcheck-bot for serguzim.net

@@ -14,7 +14,7 @@
 
 - name: Template config
   ansible.builtin.template:
-    src: service.yml.j2
+    src: yml.j2
     dest: "{{ (service_path, 'influxdb.yml') | path_join }}"
     mode: "0600"
 

roles/reitanlage_oranienburg/tasks/main.yml (Normal file): 26 changes

@@ -0,0 +1,26 @@
+---
+- name: Set common facts
+  ansible.builtin.import_tasks: tasks/set-default-facts.yml
+
+- name: Deploy {{ svc.name }}
+  vars:
+    svc: "{{ reitanlage_oranienburg_svc }}"
+    compose: "{{ reitanlage_oranienburg_compose }}"
+  block:
+    - name: Import prepare tasks for common service
+      ansible.builtin.import_tasks: tasks/prepare-common-service.yml
+
+    - name: Get the Dockerfile
+      ansible.builtin.get_url:
+        url: https://raw.githubusercontent.com/getgrav/docker-grav/master/Dockerfile
+        dest: "{{ (service_path, 'Dockerfile') | path_join }}"
+        mode: "0644"
+      register: cmd_result
+
+    - name: Set the docker rebuild flag
+      ansible.builtin.set_fact:
+        docker_rebuild: true
+      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.
+
+    - name: Import start tasks for common service
+      ansible.builtin.import_tasks: tasks/start-common-service.yml

roles/reitanlage_oranienburg/vars/main.yml (Normal file): 23 changes

@@ -0,0 +1,23 @@
+---
+reitanlage_oranienburg_svc:
+  name: reitanlage-oranienburg
+  domain: reitanlage-oranienburg.de
+  www_domain: true
+  port: 80
+  caddy_extra: |
+    header /images/* Cache-Control "max-age=31536000"
+    header /assets/* Cache-Control "max-age=2629800"
+    header /user/themes/* Cache-Control "max-age=2629800"
+
+reitanlage_oranienburg_compose:
+  watchtower: false
+  image: grav
+  volumes:
+    - data:/var/www/html/
+  file:
+    services:
+      app:
+        build:
+          context: .
+    volumes:
+      data:

@@ -24,7 +24,7 @@
 
 - name: Template config
   ansible.builtin.template:
-    src: service.yml.j2
+    src: yml.j2
     dest: "{{ (config_path, 'homeserver.yaml') | path_join }}"
     mode: "0644"
 

roles/teamspeak_fallback/files/docker-compose.yml (Normal file): 19 changes

@@ -0,0 +1,19 @@
+services:
+  teamspeak:
+    image: teamspeak
+    restart: always
+    ports:
+      - 9987:9987/udp
+      - 10011:10011
+      - 30033:30033
+    environment:
+      TS3SERVER_DB_PLUGIN: ts3db_sqlite3
+      TS3SERVER_DB_SQLCREATEPATH: create_sqlite
+      TS3SERVER_LICENSE: accept
+    volumes:
+      - data:/var/ts3server/
+
+volumes:
+  data:
+    external: true
+    name: teamspeak-fallback-data

roles/teamspeak_fallback/tasks/main.yml (Normal file): 36 changes

@@ -0,0 +1,36 @@
+---
+- name: Set common facts
+  ansible.builtin.import_tasks: tasks/set-default-facts.yml
+
+- name: Deploy {{ svc.name }}
+  vars:
+    svc: "{{ teamspeak_fallback_svc }}"
+  block:
+    - name: Import tasks to create service directory
+      ansible.builtin.import_tasks: tasks/steps/create-service-directory.yml
+
+    - name: Copy the docker-compose file
+      ansible.builtin.copy:
+        src: docker-compose.yml
+        dest: "{{ (service_path, 'docker-compose.yml') | path_join }}"
+        mode: "0644"
+
+    - name: Template the conditional-start script
+      ansible.builtin.template:
+        src: conditional-start.sh.j2
+        dest: "{{ (service_path, 'conditional-start.sh') | path_join }}"
+        mode: "0755"
+
+    - name: Copy the system service
+      ansible.builtin.template:
+        src: teamspeak-fallback.service.j2
+        dest: /etc/systemd/system/teamspeak-fallback.service
+        mode: "0644"
+      become: true
+    - name: Enable the system service
+      ansible.builtin.systemd_service:
+        name: teamspeak-fallback.service
+        state: started
+        enabled: true
+        daemon_reload: true
+      become: true

roles/teamspeak_fallback/templates/conditional-start.sh.j2 (Normal file): 18 changes

@@ -0,0 +1,18 @@
+#!/usr/bin/env sh
+
+while true
+do
+    if nc -z "{{ teamspeak_fallback_check_server }}" "{{ teamspeak_fallback_check_port }}"
+    then
+        if docker compose ps --services | grep teamspeak >/dev/null; then
+            echo "Stopping Server"
+            docker compose down
+        fi
+    else
+        if ! docker compose ps --services | grep teamspeak >/dev/null; then
+            echo "Starting Server"
+            docker compose up -d --pull=always
+        fi
+    fi
+    sleep 5
+done

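For context, not part of the diff: nc -z succeeds exactly when a TCP connection to the given host and port can be established, so the loop tears the fallback container down while the primary server answers and starts it otherwise. A rough Python equivalent of that probe, assuming the values from roles/teamspeak_fallback/vars/main.yml below:

    import socket

    def port_open(host: str, port: int, timeout: float = 3.0) -> bool:
        # Same check as `nc -z host port`: attempt a TCP connect, report success.
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return True
        except OSError:
            return False

    print(port_open("ts.sneiso.eu", 30033))  # True while the primary server is up
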
@@ -0,0 +1,13 @@
+[Service]
+[Unit]
+Description=Teamspeak Fallback Starter
+After=network.target
+
+[Service]
+Type=simple
+ExecStart={{ service_path }}/conditional-start.sh
+WorkingDirectory={{ service_path }}
+Restart=on-failure
+
+[Install]
+WantedBy=multi-user.target

roles/teamspeak_fallback/vars/main.yml (Normal file): 6 changes

@@ -0,0 +1,6 @@
+---
+teamspeak_fallback_check_server: ts.sneiso.eu
+teamspeak_fallback_check_port: 30033
+
+teamspeak_fallback_svc:
+  name: teamspeak-fallback

Some files were not shown because too many files have changed in this diff.