Fix pre-commit hooks and move directories
roles/ and inventory/ are now in playbooks/. Also fixed issues reported by ansible-lint.
parent dc398ddb6e
commit 4104057771

123 changed files with 91 additions and 39 deletions
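
The commit message mentions fixing the pre-commit hooks, but the hook configuration itself is outside this excerpt. For orientation, a minimal .pre-commit-config.yaml entry wiring ansible-lint in as a hook could look like the sketch below; the repository URL and hook id are ansible-lint's published ones, while the pinned rev is an assumption, not taken from this commit.

repos:
  - repo: https://github.com/ansible/ansible-lint
    rev: v24.2.0  # assumed pin, not part of this diff
    hooks:
      - id: ansible-lint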
Changed paths:

playbooks/
  for-ansible-lint.yml
  inventory/
  roles/
    _TEMPLATE/
    acme_dns/
    always/
    authentik/
    backup/ (files, tasks, templates, vars)
    caddy/
    extra_services/
    faas/
    forgejo/
    forgejo_runner/
    gatus/
    healthcheck/ (files, tasks, templates, vars)
    homebox/
    immich/
    influxdb/
    jellyfin/
    lego/ (files, tasks, templates, vars)
    linkwarden/
    mailcowdockerized/
    minecraft_2/
    minio/
    ntfy/
    postgresql/
    reitanlage_oranienburg/
    shlink/
    software/tasks/
    synapse/
    tandoor/
    teamspeak_fallback/

playbooks/for-ansible-lint.yml · 41 additions · Normal file
@@ -0,0 +1,41 @@
---
- name: Run all roles
  hosts: serguzim_net
  vars:
    host_services: "{{ all_services | my_service_attributes(inventory_hostname) | union(common_services) }}"
  roles:
    - acme_dns
    - always
    - authentik
    - backup
    - caddy
    - extra_services
    - faas
    - forgejo
    - forgejo_runner
    - gatus
    - healthcheck
    - homebox
    - immich
    - influxdb
    - jellyfin
    - lego
    - linkwarden
    - mailcowdockerized
    - minecraft_2
    - minio
    - ntfy
    - postgresql
    - reitanlage_oranienburg
    - shlink
    - software
    - synapse
    - tandoor
    - teamspeak_fallback
    - telegraf
    - tinytinyrss
    - umami
    - vikunja
    - watchtower
    - wiki_js
    - woodpecker
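
This playbook exists so ansible-lint has a single entry point that exercises every role. A matching .ansible-lint configuration (not part of this diff) could mark the file as a playbook kind, roughly like this:

# hypothetical .ansible-lint snippet
kinds:
  - playbook: "playbooks/for-ansible-lint.yml"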

playbooks/inventory/group_vars/all/compose_defaults.yml · 37 additions · Normal file
@@ -0,0 +1,37 @@
compose_file_main:
  services:
    app:
      image: "{{ compose.image }}"
      restart: always
      labels:
        com.centurylinklabs.watchtower.enable: "{{ compose.watchtower | default(false) }}"

compose_file_env:
  services:
    app:
      env_file:
        - service.env

compose_file_networks:
  services:
    app:
      networks:
        default:
        apps:
          aliases:
            - "{{ role_name }}"
  networks:
    default:
    apps:
      external: true

compose_file_volumes:
  services:
    app:
      volumes: "{{ compose.volumes }}"

compose_file_monitoring_label:
  services:
    app:
      labels:
        com.influxdata.telegraf.enable: true
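
These compose_file_* fragments are evidently deep-merged into each service's final compose definition. The merge step itself is not shown in this excerpt; a minimal sketch using Ansible's builtin combine filter might look like this (the task and the compose_file fact name are illustrative):

- name: Assemble the compose definition for a service (illustrative)
  ansible.builtin.set_fact:
    compose_file: >-
      {{ compose_file_main
         | combine(compose_file_env, recursive=true)
         | combine(compose_file_networks, recursive=true)
         | combine(compose.file | default({}), recursive=true) }}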

playbooks/inventory/group_vars/all/main.yml · 31 additions · Normal file
@@ -0,0 +1,31 @@
admin_email: tobias@msrg.cc
timezone: Europe/Berlin

postgres:
  host: "{{ opentofu.postgresql.host }}"
  port: "{{ opentofu.postgresql.port }}"

mailer:
  host: mail.serguzim.me
  port: 587

acme_dns:
  host: acme.serguzim.me

container_registry:
  public: "{{ opentofu.scaleway_registry_endpoint_public }}"
  private: "{{ opentofu.scaleway_registry_endpoint_private }}"


services_path: /opt/services/

common_services:
  - backup
  - lego
  - caddy
  - watchtower

caddy_path: "{{ (services_path, 'caddy') | path_join }}"
caddy_config_path: "{{ (caddy_path, 'config', 'conf.d') | path_join }}"

certificates_path: "{{ (services_path, '_certificates') | path_join }}"
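
For reference, with services_path set to /opt/services/ the path_join expressions above resolve to:

# caddy_path:         /opt/services/caddy
# caddy_config_path:  /opt/services/caddy/config/conf.d
# certificates_path:  /opt/services/_certificates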

playbooks/inventory/serguzim.net.yml · 49 additions · Normal file
@@ -0,0 +1,49 @@
all:
  children:
    serguzim_net:
      hosts:
        node001:
        node002:
        node003:
  hosts:
    local-dev:
      ansible_connection: local

    node001:
      ansible_host: "{{ opentofu.hosts.node001.fqdn_vpn }}"
      ansible_port: "{{ vault_hosts.node001.ansible_port }}"
      ansible_user: "{{ vault_hosts.node001.ansible_user }}"
      interactive_user: "{{ vault_hosts.node001.interactive_user }}"
      host_vpn:
        domain: "{{ opentofu.hosts.node001.fqdn_vpn }}"
        ip: "{{ opentofu.hosts.node001.ipv4_address_vpn }}"
      host_backup:
        hc_uid: "{{ opentofu.healthchecksio.backup.node001.id }}"
        hc_url: "{{ opentofu.healthchecksio.backup.node001.ping_url }}"
        gatus_token: "{{ vault_hosts.node001.backup.gatus_token }}"

    node002:
      ansible_host: "{{ opentofu.hosts.node002.fqdn_vpn }}"
      ansible_port: "{{ vault_hosts.node002.ansible_port }}"
      ansible_user: "{{ vault_hosts.node002.ansible_user }}"
      interactive_user: "{{ vault_hosts.node002.interactive_user }}"
      host_vpn:
        domain: "{{ opentofu.hosts.node002.fqdn_vpn }}"
        ip: "{{ opentofu.hosts.node002.ipv4_address_vpn }}"
      host_backup:
        hc_uid: "{{ opentofu.healthchecksio.backup.node002.id }}"
        hc_url: "{{ opentofu.healthchecksio.backup.node002.ping_url }}"
        gatus_token: "{{ vault_hosts.node002.backup.gatus_token }}"

    node003:
      ansible_host: "{{ opentofu.hosts.node003.fqdn_vpn }}"
      ansible_port: "{{ vault_hosts.node003.ansible_port }}"
      ansible_user: "{{ vault_hosts.node003.ansible_user }}"
      interactive_user: "{{ vault_hosts.node003.interactive_user }}"
      host_vpn:
        domain: "{{ opentofu.hosts.node003.fqdn_vpn }}"
        ip: "{{ opentofu.hosts.node003.ipv4_address_vpn }}"
      host_backup:
        hc_uid: "{{ opentofu.healthchecksio.backup.node003.id }}"
        hc_url: "{{ opentofu.healthchecksio.backup.node003.ping_url }}"
        gatus_token: "{{ vault_hosts.node003.backup.gatus_token }}"

playbooks/roles/_TEMPLATE/tasks/main.yml · 12 additions · Normal file
@@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ NAME_svc }}"
    env: "{{ NAME_env }}"
    compose: "{{ NAME_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml

playbooks/roles/_TEMPLATE/vars/main.yml · 16 additions · Normal file
@@ -0,0 +1,16 @@
---
NAME_svc:
  domain: NAME.serguzim.me
  port: 80

NAME_env:
  EXAMPLE: value

NAME_compose:
  watchtower: true
  image:
  volumes:
    - data:/data
  file:
    volumes:
      data:
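
To instantiate the template, every NAME is replaced with the new role's name. A hypothetical myapp role would start out like this (domain, port, and image are placeholders):

myapp_svc:
  domain: myapp.serguzim.me
  port: 8080

myapp_compose:
  watchtower: true
  image: example.org/myapp:latest  # placeholder image
  volumes:
    - data:/data
  file:
    volumes:
      data: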

playbooks/roles/acme_dns/tasks/main.yml · 37 additions · Normal file
@@ -0,0 +1,37 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ acme_dns_svc }}"
    env: "{{ acme_dns_env }}"
    compose: "{{ acme_dns_compose }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Setting the service config path
      ansible.builtin.set_fact:
        config_path: "{{ (service_path, 'config') | path_join }}"

    - name: Create a service-config directory
      ansible.builtin.file:
        path: "{{ config_path }}"
        state: directory
        mode: "0700"

    - name: Template config
      ansible.builtin.template:
        src: config.cfg.j2
        dest: "{{ (config_path, 'config.cfg') | path_join }}"
        mode: "0600"
      register: cmd_result

    - name: Set the docker force-recreate flag
      ansible.builtin.set_fact:
        docker_force_recreate: --force-recreate
      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.

    - name: Import start tasks for common service
      ansible.builtin.import_tasks: tasks/start-common-service.yml

playbooks/roles/acme_dns/templates/config.cfg.j2 · 32 additions · Normal file
@@ -0,0 +1,32 @@
[general]
listen = "0.0.0.0:53"
protocol = "both"
domain = "{{ svc.domain }}"
nsname = "{{ svc.domain }}"
nsadmin = "{{ svc.nsadmin }}"
records = [
    "{{ svc.domain }}. A {{ svc.records.a }}",
    "{{ svc.domain }}. AAAA {{ svc.records.aaaa }}",
    "{{ svc.domain }}. NS {{ svc.domain }}.",
]
debug = false

[database]
engine = "postgres"
connection = "postgres://{{ svc.db.user }}:{{ svc.db.pass }}@{{ svc.db.host }}/{{ svc.db.db }}"

[api]
ip = "0.0.0.0"
disable_registration = false
port = "{{ svc.port }}"
tls = "none"
corsorigins = [
    "*"
]
use_header = true
header_name = "X-Forwarded-For"

[logconfig]
loglevel = "info"
logtype = "stdout"
logformat = "text"

playbooks/roles/acme_dns/vars/main.yml · 27 additions · Normal file
@@ -0,0 +1,27 @@
---
acme_dns_svc:
  domain: "{{ acme_dns.host }}"
  port: 80
  nsadmin: "{{ admin_email | regex_replace('@', '.') }}"
  records:
    a: "{{ ansible_facts.default_ipv4.address }}"
    aaaa: "{{ ansible_facts.default_ipv6.address }}"
  db:
    host: "{{ postgres.host }}"
    port: "{{ postgres.port }}"
    user: "{{ opentofu.postgresql_data.acme_dns.user }}"
    pass: "{{ opentofu.postgresql_data.acme_dns.pass }}"
    db: "{{ opentofu.postgresql_data.acme_dns.database }}"

acme_dns_compose:
  watchtower: true
  monitoring: true
  image: joohoi/acme-dns
  volumes:
    - ./config:/etc/acme-dns:ro
  file:
    services:
      app:
        ports:
          - "53:53"
          - 53:53/udp

playbooks/roles/always/handlers/main.yml · 3 additions · Normal file
@@ -0,0 +1,3 @@
---
- name: Reload caddy
  ansible.builtin.include_tasks: tasks/reload-caddy.yml

playbooks/roles/always/tasks/main.yml · 9 additions · Normal file
@@ -0,0 +1,9 @@
---
- name: Create the services directory
  ansible.builtin.file:
    path: "{{ services_path }}"
    state: directory
    mode: "0755"
    owner: "{{ ansible_user }}"
    group: "{{ ansible_user }}"
  become: true

playbooks/roles/authentik/tasks/main.yml · 12 additions · Normal file
@@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ authentik_svc }}"
    env: "{{ authentik_env }}"
    compose: "{{ authentik_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml

playbooks/roles/authentik/vars/main.yml · 60 additions · Normal file
@@ -0,0 +1,60 @@
---
authentik_svc:
  domain: auth.serguzim.me
  port: 9000
  image_tag: 2024.8
  db:
    host: "{{ postgres.host }}"
    user: "{{ opentofu.postgresql_data.authentik.user }}"
    pass: "{{ opentofu.postgresql_data.authentik.pass }}"
    database: "{{ opentofu.postgresql_data.authentik.database }}"

authentik_env:
  AUTHENTIK_SECRET_KEY: "{{ vault_authentik.secret_key }}"

  AUTHENTIK_EMAIL__HOST: "{{ mailer.host }}"
  AUTHENTIK_EMAIL__PORT: "{{ mailer.port }}"
  AUTHENTIK_EMAIL__USERNAME: "{{ vault_authentik.mail.user }}"
  AUTHENTIK_EMAIL__PASSWORD: "{{ vault_authentik.mail.pass }}"
  AUTHENTIK_EMAIL__USE_TLS: true
  AUTHENTIK_EMAIL__USE_SSL: false
  AUTHENTIK_EMAIL__TIMEOUT: 10
  AUTHENTIK_EMAIL__FROM: auth@serguzim.me

  AUTHENTIK_AVATARS: none

  AUTHENTIK_REDIS__HOST: redis

  AUTHENTIK_POSTGRESQL__HOST: "{{ svc.db.host }}"
  AUTHENTIK_POSTGRESQL__NAME: "{{ svc.db.database }}"
  AUTHENTIK_POSTGRESQL__USER: "{{ svc.db.user }}"
  AUTHENTIK_POSTGRESQL__PASSWORD: "{{ svc.db.pass }}"

authentik_compose:
  watchtower: false
  image: ghcr.io/goauthentik/server:{{ svc.image_tag }}
  file:
    services:
      app:
        command: server
        depends_on:
          - redis
      worker:
        image: ghcr.io/goauthentik/server:{{ svc.image_tag }}
        restart: always
        command: worker
        user: root
        volumes:
          - /var/run/docker.sock:/var/run/docker.sock
          - ./certs:/certs
        env_file:
          - service.env
        depends_on:
          - redis
        networks:
          default:
      redis:
        image: redis:alpine
        restart: always
        networks:
          default:

playbooks/roles/backup/files/backup.timer · 4 additions · Normal file
@@ -0,0 +1,4 @@
[Timer]
OnCalendar=*-*-* 04:10:00
[Install]
WantedBy=timers.target

playbooks/roles/backup/files/hooks/immich_database · 6 additions · Executable file
@@ -0,0 +1,6 @@
#!/usr/bin/env bash

backup_path="$1"

cd /opt/services/immich || exit
docker compose exec database sh -c 'pg_dump -U "$DB_USERNAME" "$DB_DATABASE"' | gzip >"$backup_path/immich.sql.gz"

playbooks/roles/backup/files/hooks/mailcowdockerized · 5 additions · Executable file
@@ -0,0 +1,5 @@
#!/usr/bin/env bash

export MAILCOW_BACKUP_LOCATION="$1"

/opt/mailcow-dockerized/helper-scripts/backup_and_restore.sh backup all --delete-days 1

playbooks/roles/backup/files/hooks/postgresql · 15 additions · Executable file
@@ -0,0 +1,15 @@
#!/usr/bin/env bash

cd "$1"

postgres_tables=$(sudo -u postgres psql -Atc "SELECT datname FROM pg_database WHERE datistemplate = false;")

for i in $postgres_tables
do
    printf "dumping %s ..." "$i"
    sudo -u postgres pg_dump "$i" | gzip >"pg_dump_$i.sql.gz"
    echo " done"
done

echo "dumping all"
sudo -u postgres pg_dumpall | gzip >"pg_dumpall.sql.gz"

playbooks/roles/backup/tasks/hooks.yml · 21 additions · Normal file
@@ -0,0 +1,21 @@
---
- name: Set hooks path
  ansible.builtin.set_fact:
    hooks_path: "{{ (service_path, 'hooks') | path_join }}"
- name: Create hooks directory
  ansible.builtin.file:
    path: "{{ hooks_path }}"
    state: directory
    mode: "0755"
- name: Copy the hooks
  ansible.builtin.copy:
    src: hooks/
    dest: "{{ hooks_path }}"
    mode: "0755"
- name: Create the from directories
  ansible.builtin.file:
    path: "{{ ('/opt/services/_backup', item | basename) | path_join }}"
    state: directory
    mode: "0755"
  with_fileglob:
    - "hooks/*"

playbooks/roles/backup/tasks/main.yml · 46 additions · Normal file
@@ -0,0 +1,46 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    yml: "{{ backup_yml }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Template the main backup script
      ansible.builtin.template:
        src: backup.sh.j2
        dest: "{{ (service_path, 'backup.sh') | path_join }}"
        mode: "0755"

    - name: Template autorestic.yml
      ansible.builtin.template:
        src: yml.j2
        dest: "{{ (service_path, '.autorestic.yml') | path_join }}"
        mode: "0644"

    - name: Template autorestic.all.yml
      ansible.builtin.template:
        src: yml.j2
        dest: "{{ (service_path, '.autorestic.all.yml') | path_join }}"
        mode: "0644"
      vars:
        yml: "{{ backup_yml_all }}"

    - name: Import tasks specific to the hooks scripts
      ansible.builtin.import_tasks: hooks.yml
    - name: Import tasks specific to systemd
      ansible.builtin.import_tasks: systemd.yml

    - name: Verify service
      ansible.builtin.command:
        cmd: autorestic -v check
        chdir: "{{ service_path }}"
      changed_when: false
      become: true
      register: cmd_result_verify
      until: "cmd_result_verify is not failed"
      retries: 10
      delay: 10

playbooks/roles/backup/tasks/systemd.yml · 20 additions · Normal file
@@ -0,0 +1,20 @@
---
- name: Copy the system service
  ansible.builtin.template:
    src: backup.service.j2
    dest: /etc/systemd/system/backup.service
    mode: "0644"
  become: true
- name: Copy the system timer
  ansible.builtin.copy:
    src: backup.timer
    dest: /etc/systemd/system/backup.timer
    mode: "0644"
  become: true
- name: Enable the system timer
  ansible.builtin.systemd_service:
    name: backup.timer
    state: started
    enabled: true
    daemon_reload: true
  become: true

playbooks/roles/backup/templates/backup.service.j2 · 11 additions · Normal file
@@ -0,0 +1,11 @@
[Unit]
Description=Run the backup script
StartLimitIntervalSec=7200
StartLimitBurst=5

[Service]
Type=oneshot
ExecStart={{ (service_path, 'backup.sh') | path_join }}
WorkingDirectory={{ service_path }}
Restart=on-failure
RestartSec=15min

playbooks/roles/backup/templates/backup.sh.j2 · 12 additions · Normal file
@@ -0,0 +1,12 @@
#!/usr/bin/env bash

{{ backup_hc_command_start }}

if autorestic backup -av --ci
then
    {{ backup_hc_command_success }}
    {{ backup_gatus_command_success }}
else
    {{ backup_hc_command_fail }}
    {{ backup_gatus_command_fail }}
fi

playbooks/roles/backup/vars/main.yml · 58 additions · Normal file
@@ -0,0 +1,58 @@
---
backup_list: "{{ all_services | my_service_attributes(inventory_hostname, 'backup') }}"
backup_list_all: "{{ all_services | my_service_attributes('', 'backup') }}"

backup_msg_start: "Backup started"
backup_msg_fail: "Backup failed"
backup_msg_fail_location: "Backup failed for location: "
backup_msg_success: "Backup successful"

backup_curl_base: 'curl -L -m 10 --retry 5'
backup_hc_curl_base: '{{ backup_curl_base }} -X POST -H "Content-Type: text/plain"'
backup_gatus_curl_base: '{{ backup_curl_base }} -X POST -H "Authorization: Bearer {{ host_backup.gatus_token }}"'
backup_hc_url: '{{ host_backup.hc_url }}'
backup_gatus_url: 'https://status.serguzim.me/api/v1/endpoints/8-backups_backup@{{ ansible_facts.hostname }}/external'

backup_hc_command_start: '{{ backup_hc_curl_base }} --data "{{ backup_msg_start }}" {{ backup_hc_url }}/start'
backup_hc_command_success: '{{ backup_hc_curl_base }} --data "{{ backup_msg_success }}" {{ backup_hc_url }}'
backup_hc_command_fail: '{{ backup_hc_curl_base }} --data "{{ backup_msg_fail }}" {{ backup_hc_url }}/fail'

backup_gatus_command_success: '{{ backup_gatus_curl_base }} "{{ backup_gatus_url }}?success=true"'
backup_gatus_command_fail: '{{ backup_gatus_curl_base }} "{{ backup_gatus_url }}?success=false&error={{ backup_msg_fail | urlencode }}"'

backup_default_hooks:
  failure:
    - '{{ backup_hc_curl_base }} --data "{{ backup_msg_fail_location }}${AUTORESTIC_LOCATION}" {{ backup_hc_url }}/fail'
    - '{{ backup_gatus_curl_base }} "{{ backup_gatus_url }}?success=false&error={{ backup_msg_fail_location | urlencode }}${AUTORESTIC_LOCATION}"'

backup_global:
  all:
    cache-dir: "{{ (service_path, 'cache') | path_join }}"
    retry-lock: 5m
  forget:
    keep-last: 7
    keep-daily: 14
    keep-weekly: 16
    keep-monthly: 12
    keep-yearly: 2
    host: "{{ ansible_facts.hostname }}"
  backup:
    host: "{{ ansible_facts.hostname }}"

backup_yml:
  version: 2

  backends: "{{ vault_backup.backends }}"

  locations: "{{ backup_list | map_backup_locations(vault_backup.backends, backup_default_hooks) }}"

  global: "{{ backup_global }}"

backup_yml_all:
  version: 2

  backends: "{{ vault_backup.backends }}"

  locations: "{{ backup_list_all | map_backup_locations(vault_backup.backends, backup_default_hooks) }}"

  global: "{{ backup_global }}"
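
map_backup_locations is a custom filter whose implementation is not part of this diff; judging by autorestic's documented schema, each rendered location presumably has roughly this shape (every name below is a placeholder):

locations:
  example_service:               # placeholder location name
    from: /opt/services/example_service
    to:
      - example_backend          # a key from vault_backup.backends
    hooks:
      failure:                   # the backup_default_hooks curl commands from above
        - curl ...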

playbooks/roles/caddy/defaults/main.yml · 1 addition · Normal file
@@ -0,0 +1 @@
caddy_ports_extra: []

playbooks/roles/caddy/files/Dockerfile · 8 additions · Normal file
@@ -0,0 +1,8 @@
FROM caddy:2-builder AS builder

RUN xcaddy build \
    --with github.com/caddy-dns/acmedns@main

FROM caddy:2-alpine

COPY --from=builder /usr/bin/caddy /usr/bin/caddy

playbooks/roles/caddy/files/snippets · 46 additions · Normal file
@@ -0,0 +1,46 @@
(auth_serguzim_me) {
    # always forward outpost path to actual outpost
    reverse_proxy /outpost.goauthentik.io/* authentik:9000

    # forward authentication to outpost
    forward_auth authentik:9000 {
        uri /outpost.goauthentik.io/auth/caddy

        # capitalization of the headers is important, otherwise they will be empty
        copy_headers X-Authentik-Username X-Authentik-Groups X-Authentik-Email X-Authentik-Name X-Authentik-Uid X-Authentik-Jwt X-Authentik-Meta-Jwks X-Authentik-Meta-Outpost X-Authentik-Meta-Provider X-Authentik-Meta-App X-Authentik-Meta-Version

        # optional, in this config trust all private ranges, should probably be set to the outposts IP
        trusted_proxies private_ranges
    }
}

(default) {
    encode zstd gzip
}

(acmedns) {
    tls {
        dns acmedns {
            username "{$ACMEDNS_USER}"
            password "{$ACMEDNS_PASS}"
            subdomain "{$ACMEDNS_SUBD}"
            server_url "{$ACMEDNS_URL}"
        }
    }
}

(faas) {
    rewrite * /function/{args[0]}{uri}
    reverse_proxy https://faas.serguzim.me {
        header_up Host {http.reverse_proxy.upstream.hostport}
    }
}

(analytics) {
    handle_path /_a/* {
        reverse_proxy https://analytics.serguzim.me {
            header_up X-Analytics-IP {remote}
            header_up Host {http.reverse_proxy.upstream.hostport}
        }
    }
}

playbooks/roles/caddy/tasks/main.yml · 56 additions · Normal file
@@ -0,0 +1,56 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    env: "{{ caddy_env }}"
    compose: "{{ caddy_compose }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Copy the Dockerfile
      ansible.builtin.copy:
        src: Dockerfile
        dest: "{{ (service_path, 'Dockerfile') | path_join }}"
        mode: "0644"
      register: cmd_result

    - name: Set the docker rebuild flag
      ansible.builtin.set_fact:
        docker_rebuild: true
      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.

    - name: Set caddy config path
      ansible.builtin.set_fact:
        config_path: "{{ (service_path, 'config') | path_join }}"

    - name: Create config directory
      ansible.builtin.file:
        path: "{{ config_path }}"
        state: directory
        mode: "0755"

    - name: Template caddyfile
      ansible.builtin.template:
        src: Caddyfile.j2
        dest: "{{ (config_path, 'Caddyfile') | path_join }}"
        mode: "0644"
      notify: Reload caddy

    - name: Copy snippets file
      ansible.builtin.copy:
        src: snippets
        dest: "{{ (config_path, 'snippets') | path_join }}"
        mode: "0644"
      notify: Reload caddy

    - name: Create sites-config directory
      ansible.builtin.file:
        path: "{{ caddy_config_path }}"
        state: directory
        mode: "0755"

    - name: Import start tasks for common service
      ansible.builtin.import_tasks: tasks/start-common-service.yml

playbooks/roles/caddy/templates/Caddyfile.j2 · 11 additions · Normal file
@@ -0,0 +1,11 @@
{
    email {{ admin_email }}

    servers {
        metrics
        strict_sni_host on
    }
}

import /etc/caddy/snippets
import /etc/caddy/conf.d/*.conf

playbooks/roles/caddy/vars/main.yml · 38 additions · Normal file
@@ -0,0 +1,38 @@
---
caddy_acmedns_user: "{{ vault_caddy.acmedns.user }}"
caddy_acmedns_pass: "{{ vault_caddy.acmedns.pass }}"
caddy_acmedns_subd: "{{ vault_caddy.acmedns.subd }}"
caddy_acmedns_url: "https://{{ acme_dns.host }}"

caddy_ports_default:
  - 80:80
  - 443:443
  - 443:443/udp
  - "{{ host_vpn.ip }}:2019:2019"
caddy_ports_extra: "{{ all_services | my_service_attributes(inventory_hostname, 'ports') }}"
caddy_ports: "{{ caddy_ports_default | union(caddy_ports_extra) }}"

caddy_env:
  CADDY_ADMIN: 0.0.0.0:2019

  ACMEDNS_USER: "{{ caddy_acmedns_user }}"
  ACMEDNS_PASS: "{{ caddy_acmedns_pass }}"
  ACMEDNS_SUBD: "{{ caddy_acmedns_subd }}"
  ACMEDNS_URL: "{{ caddy_acmedns_url }}"

caddy_compose:
  watchtower: false
  image: "{{ (container_registry.public, 'services/caddy:2-alpine') | path_join }}"
  volumes:
    - "./config:/etc/caddy/"
    - data:/data
  file:
    services:
      app:
        build:
          context: .
        ports: "{{ caddy_ports }}"
        extra_hosts:
          - host.docker.internal:host-gateway
    volumes:
      data:
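
A worked example of the union above: if a service scheduled on this host contributes ports: ["8448:8448"], then caddy_ports resolves to:

# - 80:80
# - 443:443
# - 443:443/udp
# - "{{ host_vpn.ip }}:2019:2019"
# - 8448:8448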

playbooks/roles/extra_services/tasks/main.yml · 10 additions · Normal file
@@ -0,0 +1,10 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy extra services
  vars:
    svc: "{{ extra_services_svc }}"
  block:
    - name: Import tasks to template the site and functions for the reverse proxy
      ansible.builtin.include_tasks: tasks/steps/template-site-config.yml

playbooks/roles/extra_services/vars/main.yml · 3 additions · Normal file
@@ -0,0 +1,3 @@
---
extra_services_svc:
  extra_svcs: "{{ vault_extra_services }}"

playbooks/roles/faas/tasks/main.yml · 10 additions · Normal file
@@ -0,0 +1,10 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ faas_svc }}"
  block:
    - name: Import tasks to template the site and functions for the reverse proxy
      ansible.builtin.import_tasks: tasks/steps/template-site-config.yml

playbooks/roles/faas/vars/main.yml · 23 additions · Normal file
@@ -0,0 +1,23 @@
---
faas_svc:
  domain: faas.serguzim.me
  docker_host: host.docker.internal
  port: 8080
  extra_svcs:
    - domain: serguzim.me
      faas_function: webpage-serguzim-me
      www_domain: true
      hsts: true
      caddy_extra: |
        header /.well-known/* Access-Control-Allow-Origin *

        handle /.well-known/webfinger {
          map {query.resource} {user} {
            acct:tobias@msrg.cc serguzim
            acct:serguzim@msrg.cc serguzim
          }
          rewrite * /.well-known/webfinger/{user}.json
          import faas webpage-msrg-cc
        }
    - domain: xn--sder-5qa.stream
      faas_function: webpage-soeder-stream

@@ -0,0 +1 @@
<a class="item" href="https://www.serguzim.me/imprint/">Impressum</a>

playbooks/roles/forgejo/tasks/main.yml · 39 additions · Normal file
@@ -0,0 +1,39 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ forgejo_svc }}"
    env: "{{ forgejo_env }}"
    compose: "{{ forgejo_compose }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Copy the template files
      ansible.builtin.copy:
        src: templates/
        dest: "{{ (service_path, 'templates') | path_join }}"
        mode: "0644"
      register: cmd_result

    - name: Set the docker force-recreate flag
      ansible.builtin.set_fact:
        docker_force_recreate: --force-recreate
      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.

    - name: Template the custom footer
      ansible.builtin.template:
        src: footer.tmpl.j2
        dest: "{{ (service_path, 'templates', 'custom', 'footer.tmpl') | path_join }}"
        mode: "0644"
      register: cmd_result

    - name: Set the docker force-recreate flag
      ansible.builtin.set_fact:
        docker_force_recreate: --force-recreate
      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.

    - name: Import start tasks for common service
      ansible.builtin.import_tasks: tasks/start-common-service.yml

playbooks/roles/forgejo/templates/footer.tmpl.j2 · 1 addition · Normal file
@@ -0,0 +1 @@
<script async src="/_a/script.js" data-website-id="{{ vault_forgejo.umami }}"></script>

playbooks/roles/forgejo/vars/main.yml · 97 additions · Normal file
@@ -0,0 +1,97 @@
---
forgejo_svc:
  domain: git.serguzim.me
  port: 3000
  caddy_extra: |
    import analytics
    header /attachments/* Access-Control-Allow-Origin *
  db:
    host: "{{ postgres.host }}"
    port: "{{ postgres.port }}"
  ssh_port: 22

forgejo_env:
  FORGEJO__database__DB_TYPE: postgres
  FORGEJO__database__HOST: "{{ svc.db.host }}:{{ svc.db.port }}"
  FORGEJO__database__NAME: "{{ opentofu.postgresql_service_roles.forgejo.database }}"
  FORGEJO__database__USER: "{{ opentofu.postgresql_service_roles.forgejo.user }}"
  FORGEJO__database__PASSWD: "{{ opentofu.postgresql_service_roles.forgejo.pass }}"
  FORGEJO__database__SSL_MODE: verify-full

  FORGEJO__repository__ENABLE_PUSH_CREATE_USER: true
  FORGEJO__repository__ENABLE_PUSH_CREATE_ORG: true
  FORGEJO__repository__DEFAULT_BRANCH: main

  FORGEJO__cors__ENABLED: true
  FORGEJO__cors__SCHEME: https

  FORGEJO__ui__DEFAULT_THEME: forgejo-dark

  FORGEJO__server__DOMAIN: "{{ svc.domain }}"
  FORGEJO__server__SSH_DOMAIN: "{{ svc.domain }}"
  FORGEJO__server__SSH_PORT: "{{ svc.ssh_port }}"
  FORGEJO__server__ROOT_URL: https://{{ svc.domain }}
  FORGEJO__server__OFFLINE_MODE: true
  FORGEJO__server__LFS_JWT_SECRET: "{{ vault_forgejo.server_lfs_jwt_secret }}"
  FORGEJO__server__LFS_START_SERVER: true

  FORGEJO__security__INSTALL_LOCK: true
  FORGEJO__security__INTERNAL_TOKEN: "{{ vault_forgejo.security_internal_token }}"
  FORGEJO__security__SECRET_KEY: "{{ vault_forgejo.security_secret_key }}"

  FORGEJO__openid__ENABLE_OPENID_SIGNUP: true
  FORGEJO__openid__ENABLE_OPENID_SIGNIN: false

  FORGEJO__service__ALLOW_ONLY_EXTERNAL_REGISTRATION: true
  FORGEJO__service__ENABLE_BASIC_AUTHENTICATION: false
  FORGEJO__service__DEFAULT_KEEP_EMAIL_PRIVATE: true
  FORGEJO__service__NO_REPLY_ADDRESS: discard.msrg.cc

  FORGEJO__webhook__DELIVER_TIMEOUT: 60

  FORGEJO__mailer__ENABLED: true
  FORGEJO__mailer__PROTOCOL: smtp+starttls
  FORGEJO__mailer__SMTP_ADDR: mail.serguzim.me
  FORGEJO__mailer__SMTP_PORT: 587
  FORGEJO__mailer__FROM: Forgejo <git@serguzim.me>
  FORGEJO__mailer__USER: git@serguzim.me
  FORGEJO__mailer__PASSWD: "{{ vault_forgejo.mailer_passwd }}"
  FORGEJO__mailer__SEND_AS_PLAIN_TEXT: true

  FORGEJO__picture__DISABLE_GRAVATAR: true

  FORGEJO__attachment__MAX_FILES: 10

  FORGEJO__oauth2__JWT_SECRET: "{{ vault_forgejo.oauth2_jwt_secret }}"

  FORGEJO__metrics__ENABLED: true
  FORGEJO__metrics__TOKEN: "{{ vault_metrics_token }}"

  FORGEJO__actions__ENABLED: true

  FORGEJO__storage__STORAGE_TYPE: minio
  FORGEJO__storage__MINIO_ENDPOINT: "{{ opentofu.scaleway_data.forgejo.api_endpoint | urlsplit('hostname') }}"
  FORGEJO__storage__MINIO_ACCESS_KEY_ID: "{{ opentofu.scaleway_data.forgejo.access_key }}"
  FORGEJO__storage__MINIO_SECRET_ACCESS_KEY: "{{ opentofu.scaleway_data.forgejo.secret_key }}"
  FORGEJO__storage__MINIO_BUCKET: "{{ opentofu.scaleway_data.forgejo.name }}"
  FORGEJO__storage__MINIO_LOCATION: "{{ opentofu.scaleway_data.forgejo.region }}"
  FORGEJO__storage__MINIO_USE_SSL: true

  FORGEJO__other__SHOW_FOOTER_VERSION: true
  FORGEJO__other__SHOW_FOOTER_TEMPLATE_LOAD_TIME: false

forgejo_compose:
  watchtower: true
  image: codeberg.org/forgejo/forgejo:7.0
  volumes:
    - data:/data
    - ./templates:/data/gitea/templates
    - /etc/timezone:/etc/timezone:ro
    - /etc/localtime:/etc/localtime:ro
  file:
    services:
      app:
        ports:
          - "{{ svc.ssh_port }}:{{ svc.ssh_port }}"
    volumes:
      data:

playbooks/roles/forgejo_runner/files/config.yml · 81 additions · Normal file
@@ -0,0 +1,81 @@
log:
  # The level of logging, can be trace, debug, info, warn, error, fatal
  level: info

runner:
  # Where to store the registration result.
  file: /data/.runner
  # Execute how many tasks concurrently at the same time.
  capacity: 1
  # Extra environment variables to run jobs.
  #envs:
  #  A_TEST_ENV_NAME_1: a_test_env_value_1
  #  A_TEST_ENV_NAME_2: a_test_env_value_2
  # Extra environment variables to run jobs from a file.
  # It will be ignored if it's empty or the file doesn't exist.
  #env_file: .env
  # The timeout for a job to be finished.
  # Please note that the Forgejo instance also has a timeout (3h by default) for the job.
  # So the job could be stopped by the Forgejo instance if its timeout is shorter than this.
  timeout: 3h
  # Whether to skip verifying the TLS certificate of the Forgejo instance.
  insecure: false
  # The timeout for fetching the job from the Forgejo instance.
  fetch_timeout: 5s
  # The interval for fetching the job from the Forgejo instance.
  fetch_interval: 2s
  # The labels of a runner are used to determine which jobs the runner can run, and how to run them.
  # Like: ["macos-arm64:host", "ubuntu-latest:docker://node:16-bullseye", "ubuntu-22.04:docker://node:16-bullseye"]
  # If it's empty when registering, it will ask for inputting labels.
  # If it's empty when executing `daemon`, it will use the labels in the `.runner` file.
  labels: []

cache:
  # Enable cache server to use actions/cache.
  enabled: true
  # The directory to store the cache data.
  # If it's empty, the cache data will be stored in $HOME/.cache/actcache.
  dir: ""
  # The host of the cache server.
  # It's not for the address to listen, but the address to connect from job containers.
  # So 0.0.0.0 is a bad choice, leave it empty to detect automatically.
  host: ""
  # The port of the cache server.
  # 0 means to use a random available port.
  port: 0

container:
  # Specifies the network to which the container will connect.
  # Could be host, bridge or the name of a custom network.
  # If it's empty, create a network automatically.
  network: ""
  # Whether to create networks with IPv6 enabled. Requires the Docker daemon to be set up accordingly.
  # Only takes effect if "network" is set to "".
  enable_ipv6: false
  # Whether to use privileged mode or not when launching task containers (privileged mode is required for Docker-in-Docker).
  privileged: false
  # And other options to be used when the container is started (eg, --add-host=my.forgejo.url:host-gateway).
  options:
  # The parent directory of a job's working directory.
  # If it's empty, /workspace will be used.
  workdir_parent:
  # Volumes (including bind mounts) can be mounted to containers. Glob syntax is supported, see https://github.com/gobwas/glob
  # You can specify multiple volumes. If the sequence is empty, no volumes can be mounted.
  # For example, if you only allow containers to mount the `data` volume and all the json files in `/src`, you should change the config to:
  # valid_volumes:
  #   - data
  #   - /src/*.json
  # If you want to allow any volume, please use the following configuration:
  # valid_volumes:
  #   - '**'
  valid_volumes: []
  # Overrides the docker client host with the specified one.
  # If it's empty, act_runner will find an available docker host automatically.
  # If it's "-", act_runner will find an available docker host automatically, but the docker host won't be mounted to the job containers and service containers.
  # If it's not empty or "-", the specified docker host will be used. An error will be returned if it doesn't work.
  docker_host: ""

host:
  # The parent directory of a job's working directory.
  # If it's empty, $HOME/.cache/act/ will be used.
  workdir_parent:

playbooks/roles/forgejo_runner/tasks/main.yml · 41 additions · Normal file
@@ -0,0 +1,41 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    env: "{{ forgejo_runner_env }}"
    compose: "{{ forgejo_runner_compose }}"
  block:
    - name: Import tasks to create service directory
      ansible.builtin.import_tasks: tasks/steps/create-service-directory.yml
    - name: Import tasks to template docker compose file
      ansible.builtin.import_tasks: tasks/steps/template-docker-compose.yml

    - name: Copy the config
      ansible.builtin.copy:
        src: config.yml
        dest: "{{ (service_path, 'config.yml') | path_join }}"
        mode: "0755"

    - name: Check if service.env already exists
      ansible.builtin.stat:
        path: "{{ (service_path, 'service.env') | path_join }}"
      register: env_file

    - name: Import tasks to prompt for the registration token
      ansible.builtin.import_tasks: tasks/prompt-registration-token.yml
      when: not env_file.stat.exists or force_forgejo_runner_registration | default(False)

    - name: Import tasks create a service.env file
      ansible.builtin.import_tasks: tasks/steps/template-service-env.yml
    - name: Import start tasks for common service
      ansible.builtin.import_tasks: tasks/start-common-service.yml

    - name: Register runner
      ansible.builtin.command:
        cmd: docker compose run --rm -it app sh -c
          'forgejo-runner register --no-interactive --token ${FORGEJO_RUNNER_REGISTRATION_TOKEN} --instance ${FORGEJO_INSTANCE_URL}'
        chdir: "{{ service_path }}"
      when: not env_file.stat.exists or force_forgejo_runner_registration | default(False)
      changed_when: true # "when" checks enough. We are sure to change something here.

@@ -0,0 +1,10 @@
---
- name: Input forgejo-runner registration token
  ansible.builtin.pause:
    prompt: Enter a secret
    echo: false
  register: promt_registration_token

- name: Put registration token into env vars
  ansible.builtin.set_fact:
    forgejo_runner_env: "{{ forgejo_runner_env | combine({'FORGEJO_RUNNER_REGISTRATION_TOKEN': promt_registration_token.user_input}, recursive=True) }}"

playbooks/roles/forgejo_runner/vars/main.yml · 29 additions · Normal file
@@ -0,0 +1,29 @@
---
forgejo_runner_env:
  FORGEJO_INSTANCE_URL: https://git.serguzim.me/
  FORGEJO_RUNNER_REGISTRATION_TOKEN:
  DOCKER_HOST: tcp://docker-in-docker:2375

forgejo_runner_compose:
  watchtower: true
  image: code.forgejo.org/forgejo/runner:3.3.0
  volumes:
    - ./config.yml:/config/config.yml
    - data:/data
  file:
    services:
      app:
        hostname: "{{ ansible_facts.hostname }}"
        command: forgejo-runner --config /config/config.yml daemon
        depends_on:
          - docker-in-docker
        links:
          - docker-in-docker
      docker-in-docker:
        image: docker:dind
        privileged: true
        command: dockerd -H tcp://0.0.0.0:2375 --tls=false
        networks:
          default:
    volumes:
      data:

playbooks/roles/gatus/tasks/main.yml · 27 additions · Normal file
@@ -0,0 +1,27 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ gatus_svc }}"
    yml: "{{ gatus_yml }}"
    compose: "{{ gatus_compose }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Template config
      ansible.builtin.template:
        src: yml.j2
        dest: "{{ (service_path, 'config.yaml') | path_join }}"
        mode: "0644"
      register: cmd_result

    - name: Set the docker force-recreate flag
      ansible.builtin.set_fact:
        docker_force_recreate: --force-recreate
      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.

    - name: Import start tasks for common service
      ansible.builtin.import_tasks: tasks/start-common-service.yml
|
75
playbooks/roles/gatus/vars/main.yml
Normal file
75
playbooks/roles/gatus/vars/main.yml
Normal file
|
@ -0,0 +1,75 @@
|
|||
---
|
||||
gatus_svc:
|
||||
domain: status.serguzim.me
|
||||
port: 8080
|
||||
|
||||
gatus_external_endpoints_backups: "{{ vault_hosts | vault_hosts_backup_to_gatus() }}"
|
||||
|
||||
gatus_endpoints_hosts: "{{ opentofu.hosts | hosts_to_gatus() }}"
|
||||
gatus_endpoints_services: "{{ all_services | services_to_gatus() }}"
|
||||
|
||||
gatus_federation_tester: "https://federationtester.matrix.org/api/report?server_name=msrg.cc"
|
||||
|
||||
gatus_endpoints_other:
|
||||
- name: matrix-federation
|
||||
url: "{{ gatus_federation_tester }}"
|
||||
group: 9-external
|
||||
interval: 5m
|
||||
conditions:
|
||||
- '[STATUS] == 200'
|
||||
- '[BODY].FederationOK == true'
|
||||
ui:
|
||||
hide-url: true
|
||||
alerts:
|
||||
- type: ntfy
|
||||
send-on-resolved: true
|
||||
- type: email
|
||||
send-on-resolved: true
|
||||
- name: healthchecks-io
|
||||
url: "{{ opentofu.healthchecksio.status.ping_url }}"
|
||||
group: 9-external
|
||||
interval: 5m
|
||||
conditions:
|
||||
- '[STATUS] == 200'
|
||||
ui:
|
||||
hide-url: true
|
||||
|
||||
gatus_yml:
|
||||
storage:
|
||||
type: sqlite
|
||||
path: /data/data.db
|
||||
|
||||
connectivity:
|
||||
checker:
|
||||
target: 1.1.1.1:53
|
||||
interval: 60s
|
||||
|
||||
security:
|
||||
oidc:
|
||||
issuer-url: "{{ opentofu.authentik_data.gatus.base_url }}/"
|
||||
redirect-url: "https://{{ gatus_svc.domain }}/authorization-code/callback"
|
||||
client-id: "{{ opentofu.authentik_data.gatus.client_id }}"
|
||||
client-secret: "{{ opentofu.authentik_data.gatus.client_secret }}"
|
||||
scopes: ["openid"]
|
||||
|
||||
ui:
|
||||
buttons:
|
||||
- name: Matrix Federation Tester
|
||||
link: "{{ gatus_federation_tester }}"
|
||||
|
||||
alerting:
|
||||
email: "{{ vault_gatus.alerting.email }}"
|
||||
ntfy: "{{ vault_gatus.alerting.ntfy }}"
|
||||
|
||||
external-endpoints: "{{ gatus_external_endpoints_backups }}"
|
||||
endpoints: "{{ gatus_endpoints_hosts | union(gatus_endpoints_services) | union(gatus_endpoints_other) }}"
|
||||
|
||||
gatus_compose:
|
||||
watchtower: true
|
||||
image: twinproduction/gatus
|
||||
volumes:
|
||||
- ./config.yaml:/config/config.yaml
|
||||
- data:/data
|
||||
file:
|
||||
volumes:
|
||||
data:
|

playbooks/roles/healthcheck/files/Dockerfile · 6 additions · Normal file
@@ -0,0 +1,6 @@
FROM ubuntu

ENV DEBIAN_FRONTEND=noninteractive

RUN apt update -y \
    && apt install -y curl dnsutils msmtp gettext-base python3-matrix-nio python3-requests

playbooks/roles/healthcheck/files/data/mail · 17 additions · Executable file
@@ -0,0 +1,17 @@
#!/usr/bin/sh

cd /opt/ || exit

hc_url="https://hc-ping.com/$MAIL_HC_UID"

alias curl_hc='curl -LA "$USER_AGENT" --retry 3'

envsubst < template.msmtprc > /tmp/msmtprc
envsubst < mailcheck.template.mail > /tmp/mailcheck.mail

result=$(msmtp -C /tmp/msmtprc -a default "$MAIL_HC_UID@hc-ping.com" < /tmp/mailcheck.mail 2>&1)
if [ "$?" != "0" ]
then
    echo "$result"
    curl_hc --data-raw "$result" "$hc_url/fail" >/dev/null
fi

@@ -0,0 +1,5 @@
To: ${MAIL_HC_UID}@hc-ping.com
From: ${MAIL_USER}
Subject: Healthcheck

Mailserver alive

playbooks/roles/healthcheck/files/data/matrix · 43 additions · Executable file
@@ -0,0 +1,43 @@
#!/usr/bin/python3

import datetime
import os
import requests
import sys

import asyncio
from nio import AsyncClient, RoomMessageNotice

def send_ping(success, msg=""):
    url = os.environ['MATRIX_HC_URL']
    if not success:
        url += "/fail"

    requests.get(url, data=msg, headers={'user-agent': os.environ['USER_AGENT']})


async def main():
    try:
        client = AsyncClient(os.environ['MATRIX_SERVER'])
        client.access_token = os.environ['MATRIX_TOKEN']
        client.device_id = os.environ['USER_AGENT']
        await client.room_send(
            room_id = os.environ['MATRIX_ROOM'],
            message_type = "m.room.message",
            content = {
                "msgtype": "m.text",
                "body": "!ping"
            }
        )
    except Exception as e:
        print(e)

        print("exception during login or sending")
        send_ping(False, str(e))
        sys.exit(1)
    await client.close()

    send_ping(True)
    sys.exit(0)


asyncio.new_event_loop().run_until_complete(main())

playbooks/roles/healthcheck/files/data/template.msmtprc · 13 additions · Normal file
@@ -0,0 +1,13 @@
defaults
auth on
tls on
tls_trust_file /etc/ssl/certs/ca-certificates.crt
logfile /tmp/msmtp.log

account default
host ${MAIL_HOST}
port ${MAIL_PORT}
tls_starttls on
from ${MAIL_USER}
user ${MAIL_USER}
password ${MAIL_PASS}

playbooks/roles/healthcheck/files/healthcheck@.timer · 4 additions · Normal file
@@ -0,0 +1,4 @@
[Timer]
OnCalendar=*:0/5
[Install]
WantedBy=timers.target

playbooks/roles/healthcheck/tasks/docker.yml · 16 additions · Normal file
@@ -0,0 +1,16 @@
---
- name: Template the docker-compose file
  ansible.builtin.template:
    src: docker-compose.yml.j2
    dest: "{{ (service_path, 'docker-compose.yml') | path_join }}"
    mode: "0644"
- name: Copy the Dockerfile
  ansible.builtin.copy:
    src: Dockerfile
    dest: "{{ (service_path, 'Dockerfile') | path_join }}"
    mode: "0644"
- name: Copy the data files
  ansible.builtin.copy:
    src: data
    dest: "{{ service_path }}"
    mode: "0755"

playbooks/roles/healthcheck/tasks/main.yml · 28 additions · Normal file
@@ -0,0 +1,28 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ healthcheck_svc }}"
    env: "{{ healthcheck_env }}"
  block:
    - name: Import tasks to create service directory
      ansible.builtin.import_tasks: tasks/steps/create-service-directory.yml

    - name: Import tasks specific to docker
      ansible.builtin.import_tasks: docker.yml
    - name: Import tasks specific to systemd
      ansible.builtin.import_tasks: systemd.yml

    - name: Import tasks create a service.env file
      ansible.builtin.import_tasks: tasks/steps/template-service-env.yml

    - name: Build service
      ansible.builtin.command:
        cmd: docker compose build --pull
        chdir: "{{ service_path }}"
      when:
        - "'local-dev' != inventory_hostname"
      register: cmd_result
      changed_when: true

playbooks/roles/healthcheck/tasks/systemd.yml · 40 additions · Normal file
@@ -0,0 +1,40 @@
---
- name: Template the system service
  ansible.builtin.template:
    src: healthcheck@.service.j2
    dest: /etc/systemd/system/healthcheck@.service
    mode: "0644"
  become: true
- name: Copy the system timer
  ansible.builtin.copy:
    src: healthcheck@.timer
    dest: /etc/systemd/system/healthcheck@.timer
    mode: "0644"
  become: true

- name: Get all healthcheck timers
  ansible.builtin.shell: # noqa: command-instead-of-module
    cmd: "set -o pipefail && systemctl list-timers 'healthcheck@*' --all --output=json | jq -r '.[].unit'"
  register: systemd_timers_result
  changed_when: false

- name: Generate systemd timer names
  ansible.builtin.set_fact:
    healthcheck_systemd_timers: "{{ healthcheck_svc.checks | list_prefix_suffix('healthcheck@', '.timer') }}"

- name: Disable unused system timers
  ansible.builtin.systemd_service:
    name: "{{ item }}"
    state: stopped
    enabled: false
  loop: "{{ systemd_timers_result.stdout_lines | difference(healthcheck_systemd_timers) }}"
  become: true

- name: Enable the system timer
  ansible.builtin.systemd_service:
    name: "{{ item }}"
    state: started
    enabled: true
    daemon_reload: true
  loop: "{{ healthcheck_systemd_timers }}"
  become: true

playbooks/roles/healthcheck/templates/docker-compose.yml.j2 · 19 additions · Normal file
@@ -0,0 +1,19 @@
x-common-elements:
  &common-elements
  build:
    context: .
  image: "{{ (container_registry.public, 'services/healthcheck') | path_join }}"
  restart: never
  env_file:
    - service.env
  volumes:
    - ./data/:/opt
  network_mode: host

services:
  matrix:
    <<: *common-elements
    command: "/opt/matrix"
  mail:
    <<: *common-elements
    command: "/opt/mail"
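
The x-common-elements block above uses a YAML anchor plus merge keys, so each service only adds its command. After expansion, the mail service is equivalent to:

services:
  mail:
    build:
      context: .
    image: "{{ (container_registry.public, 'services/healthcheck') | path_join }}"
    restart: never
    env_file:
      - service.env
    volumes:
      - ./data/:/opt
    network_mode: host
    command: "/opt/mail"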

@@ -0,0 +1,5 @@
[Service]
Type=simple
ExecStart=/usr/bin/docker compose run --rm %i
WorkingDirectory={{ service_path }}
RuntimeMaxSec=300

playbooks/roles/healthcheck/vars/main.yml · 20 additions · Normal file
@@ -0,0 +1,20 @@
---
healthcheck_svc:
  checks:
    - mail
    - matrix

healthcheck_env:
  USER_AGENT: healthcheck-bot for serguzim.net

  MATRIX_SERVER: https://matrix.serguzim.me
  MATRIX_SERVER_FEDTESTER: msrg.cc
  MATRIX_HC_URL: "{{ opentofu.healthchecksio.healthcheck.matrix.ping_url }}"
  MATRIX_TOKEN: "{{ vault_healthcheck.matrix.token }}"
  MATRIX_ROOM: "{{ vault_healthcheck.matrix.room }}"

  MAIL_HC_UID: "{{ opentofu.healthchecksio.healthcheck.mail.id }}"
  MAIL_HOST: "{{ mailer.host }}"
  MAIL_PORT: "{{ mailer.port }}"
  MAIL_USER: "{{ vault_healthcheck.mailer.user }}"
  MAIL_PASS: "{{ vault_healthcheck.mailer.pass }}"

playbooks/roles/homebox/tasks/main.yml · 12 additions · Normal file
@@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ homebox_svc }}"
    env: "{{ homebox_env }}"
    compose: "{{ homebox_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml

playbooks/roles/homebox/vars/main.yml · 22 additions · Normal file
@@ -0,0 +1,22 @@
---
homebox_svc:
  domain: inventory.serguzim.me
  port: 7745

homebox_env:
  HBOX_OPTIONS_ALLOW_REGISTRATION: false
  HBOX_MAILER_HOST: mail.serguzim.me
  HBOX_MAILER_PORT: 587
  HBOX_MAILER_USERNAME: inventory@serguzim.me
  HBOX_MAILER_PASSWORD: "{{ vault_homebox.mailer_passwd }}"
  HBOX_MAILER_FROM: Homebox <inventory@serguzim.me>
  HBOX_SWAGGER_SCHEMA: https

homebox_compose:
  watchtower: true
  image: ghcr.io/hay-kot/homebox:latest-rootless
  volumes:
    - data:/data
  file:
    volumes:
      data:

playbooks/roles/immich/tasks/main.yml · 12 additions · Normal file
@@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ immich_svc }}"
    env: "{{ immich_env }}"
    compose: "{{ immich_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml

playbooks/roles/immich/vars/main.yml · 73 additions · Normal file
@@ -0,0 +1,73 @@
---
immich_db_host: database
immich_db_db: immich
immich_db_user: "{{ vault_immich.db.user }}"
immich_db_pass: "{{ vault_immich.db.pass }}"

immich_svc:
  domain: gallery.serguzim.me
  port: 3001
  version: release
  db:
    host: "{{ postgres.host }}"
    database: authentik


immich_env:
  # IMMICH_CONFIG_FILE: /immich.json

  TZ: "{{ timezone }}"

  DB_HOSTNAME: "{{ immich_db_host }}"
  DB_DATABASE_NAME: "{{ immich_db_db }}"
  DB_USERNAME: "{{ immich_db_user }}"
  DB_PASSWORD: "{{ immich_db_pass }}"

  POSTGRES_DB: "{{ immich_db_db }}"
  POSTGRES_USER: "{{ immich_db_user }}"
  POSTGRES_PASSWORD: "{{ immich_db_pass }}"

  REDIS_HOSTNAME: redis

immich_compose:
  watchtower: false
  image: ghcr.io/immich-app/immich-server:release
  volumes:
    - upload:/usr/src/app/upload
  file:
    services:
      app:
        depends_on:
          - database
          - redis

      machine-learning:
        image: ghcr.io/immich-app/immich-machine-learning:release
        volumes:
          - model-cache:/cache
        env_file:
          - service.env
        restart: always
        networks:
          default:

      redis:
        image: redis:6.2-alpine
        restart: always
        networks:
          default:

      database:
        image: tensorchord/pgvecto-rs:pg16-v0.2.0
        env_file:
          - service.env
        volumes:
          - pgdata:/var/lib/postgresql/data
        restart: always
        networks:
          default:

    volumes:
      upload:
      pgdata:
      model-cache:
28
playbooks/roles/influxdb/tasks/main.yml
Normal file
@ -0,0 +1,28 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ influxdb_svc }}"
    env: "{{ influxdb_env }}"
    compose: "{{ influxdb_compose }}"
    yml: "{{ influxdb_yml }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Template config
      ansible.builtin.template:
        src: yml.j2
        dest: "{{ (service_path, 'influxdb.yml') | path_join }}"
        mode: "0600"
      register: cmd_result

    - name: Set the docker force-recreate flag
      ansible.builtin.set_fact:
        docker_force_recreate: --force-recreate
      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.

    - name: Import start tasks for common service
      ansible.builtin.import_tasks: tasks/start-common-service.yml
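A note on the force-recreate flag: setting docker_force_recreate only when the template actually changed is the stated workaround for handlers not supporting variables. Presumably the shared start tasks append it to the compose invocation, along the lines of (a sketch; start-common-service.yml itself is not part of this diff):

    docker compose up -d --force-recreate

so a changed influxdb.yml forces the container to be recreated and re-read its config.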
72
playbooks/roles/influxdb/vars/main.yml
Normal file
@ -0,0 +1,72 @@
---
influxdb_svc:
  domain: tick.serguzim.me
  port: 8086
  data_dir: /var/lib/influxdb2

influxdb_yml:
  assets-path: ""
  bolt-path: "{{ (svc.data_dir, 'influxd.bolt') | path_join }}"
  e2e-testing: false
  engine-path: "{{ (svc.data_dir, 'engine') | path_join }}"
  feature-flags: {}
  http-bind-address: "0.0.0.0:{{ svc.port }}"
  influxql-max-select-buckets: 0
  influxql-max-select-point: 0
  influxql-max-select-series: 0
  key-name: ""
  log-level: info
  nats-max-payload-bytes: 1048576
  nats-port: 4222
  no-tasks: false
  query-concurrency: 10
  query-initial-memory-bytes: 0
  query-max-memory-bytes: 0
  query-memory-bytes: 9223372036854775807
  query-queue-size: 10
  reporting-disabled: false
  secret-store: bolt
  session-length: 60
  session-renew-disabled: false
  storage-cache-max-memory-size: 1073741824
  storage-cache-snapshot-memory-size: 26214400
  storage-cache-snapshot-write-cold-duration: 10m0s
  storage-compact-full-write-cold-duration: 4h0m0s
  storage-compact-throughput-burst: 50331648
  storage-max-concurrent-compactions: 0
  storage-max-index-log-file-size: 1048576
  storage-retention-check-interval: 30m0s
  storage-series-file-max-concurrent-snapshot-compactions: 0
  storage-series-id-set-cache-size: 0
  storage-shard-precreator-advance-period: 30m0s
  storage-shard-precreator-check-interval: 10m0s
  storage-tsm-use-madv-willneed: false
  storage-validate-keys: false
  storage-wal-fsync-delay: "0s"
  store: bolt
  testing-always-allow-setup: false
  tls-cert: ""
  tls-key: ""
  tls-min-version: "1.2"
  tls-strict-ciphers: false
  tracing-type: ""
  vault-addr: ""
  vault-cacert: ""
  vault-capath: ""
  vault-client-cert: ""
  vault-client-key: ""
  vault-client-timeout: "0s"
  vault-max-retries: 0
  vault-skip-verify: false
  vault-tls-server-name: ""
  vault-token: ""

influxdb_compose:
  watchtower: false
  image: influxdb:2.7
  volumes:
    - ./influxdb.yml:/etc/influxdb2/config.yml
    - data:{{ svc.data_dir }}
  file:
    volumes:
      data:
12
playbooks/roles/jellyfin/tasks/main.yml
Normal file
@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ jellyfin_svc }}"
    env: "{{ jellyfin_env }}"
    compose: "{{ jellyfin_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml
26
playbooks/roles/jellyfin/vars/main.yml
Normal file
@ -0,0 +1,26 @@
---
jellyfin_svc:
  domain: media.serguzim.me
  port: 8096
  db:
    host: "{{ postgres.host }}"
    port: "{{ postgres.port }}"

jellyfin_env:
  JELLYFIN_PublishedServerUrl: https://{{ svc.domain }}

jellyfin_compose:
  watchtower: true
  image: jellyfin/jellyfin
  volumes:
    - config:/config
    - cache:/cache
    - media:/media
  file:
    services:
      app:
        user: 8096:8096
    volumes:
      config:
      cache:
      media:
6
playbooks/roles/lego/files/hook.sh
Normal file
@ -0,0 +1,6 @@
#!/usr/bin/env sh

cp -f "$LEGO_CERT_PATH" /certificates
cp -f "$LEGO_CERT_KEY_PATH" /certificates

exit 33 # special exit code to signal that the certificate has been updated
12
playbooks/roles/lego/files/hooks/auth.serguzim.me
Executable file
@ -0,0 +1,12 @@
#!/usr/bin/env sh

domain="auth.serguzim.me"

_install() {
    install --owner=root --group=root --mode=600 \
        "$CERTIFICATES_PATH/$domain.$1" \
        "/opt/services/authentik/certs/$domain.$2"
}

_install crt pem
_install key key
16
playbooks/roles/lego/files/hooks/db.serguzim.me
Executable file
@ -0,0 +1,16 @@
#!/usr/bin/env sh

domain="db.serguzim.me"

_install() {
    install --owner=postgres --group=postgres --mode=600 \
        "$CERTIFICATES_PATH/$domain.$1" \
        "/var/lib/postgres/data/server.$1"
}

_install crt
_install key

sudo -u postgres pg_ctl -D /var/lib/postgres/data/ reload

# vim: ft=sh
26
playbooks/roles/lego/files/lego.sh
Executable file
@ -0,0 +1,26 @@
#!/usr/bin/env sh

set -a
. ./service.env
set +a

domain="$1"
action="run"

existing_domains=$(docker compose run --rm app list -n)

if echo "$existing_domains" | grep -q "$domain";
then
    action="renew"
fi

docker compose run --rm app \
    --domains "$domain" \
    "$action" \
    "--$action-hook" "/config/hook.sh"

if [ "$?" = "33" ] && [ -x "./hooks/$domain" ];
then
    echo "Running hook for $domain"
    "./hooks/$domain"
fi
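lego.sh picks run for a first issuance and renew once the domain already appears in lego's list output; the special exit code 33 from hook.sh (see files/hook.sh above) propagates through docker compose run into $?, which is what gates the per-domain hook. For example, with the db.serguzim.me hook shipped above:

    ./lego.sh db.serguzim.me

issues or renews the certificate and, only when a new certificate was actually written, reinstalls it into the PostgreSQL data directory and reloads the server.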
10
playbooks/roles/lego/files/lego@.timer
Normal file
@ -0,0 +1,10 @@
[Unit]
Description=Renew certificates

[Timer]
Persistent=true
OnCalendar=*-*-* 01:15:00
RandomizedDelaySec=2h

[Install]
WantedBy=timers.target
19
playbooks/roles/lego/tasks/config.yml
Normal file
@ -0,0 +1,19 @@
---
- name: Set config path
  ansible.builtin.set_fact:
    config_path: "{{ (service_path, 'config') | path_join }}"
- name: Create config directory
  ansible.builtin.file:
    path: "{{ config_path }}"
    state: directory
    mode: "0755"
- name: Copy the acme-dns-accounts
  ansible.builtin.template:
    src: "json.j2"
    dest: "{{ (config_path, 'acme-dns-accounts.json') | path_join }}"
    mode: "0644"
- name: Copy the hook script
  ansible.builtin.copy:
    src: "hook.sh"
    dest: "{{ (config_path, 'hook.sh') | path_join }}"
    mode: "0755"
14
playbooks/roles/lego/tasks/hooks.yml
Normal file
@ -0,0 +1,14 @@
---
- name: Set hooks path
  ansible.builtin.set_fact:
    hooks_path: "{{ (service_path, 'hooks') | path_join }}"
- name: Create hooks directory
  ansible.builtin.file:
    path: "{{ hooks_path }}"
    state: directory
    mode: "0755"
- name: Copy the additional hooks
  ansible.builtin.copy:
    src: hooks/
    dest: "{{ hooks_path }}"
    mode: "0755"
43
playbooks/roles/lego/tasks/main.yml
Normal file
@ -0,0 +1,43 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    env: "{{ lego_env }}"
    json: "{{ vault_acmedns_registered | acmedns_to_lego }}"
    compose: "{{ lego_compose }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Create _certificates directory
      ansible.builtin.file:
        path: "{{ certificates_path }}"
        state: directory
        mode: "0755"

    - name: Import tasks specific to the config directory
      ansible.builtin.import_tasks: config.yml
    - name: Import tasks specific to hooks
      ansible.builtin.import_tasks: hooks.yml
    - name: Import tasks specific to systemd
      ansible.builtin.import_tasks: systemd.yml

    - name: Copy the run script
      ansible.builtin.copy:
        src: "lego.sh"
        dest: "{{ (service_path, 'lego.sh') | path_join }}"
        mode: "0755"

    - name: Import tasks to create a service.env file
      ansible.builtin.import_tasks: tasks/steps/template-service-env.yml

    - name: Run certificate-script for domains
      ansible.builtin.command:
        cmd: "./lego.sh {{ item }}"
        chdir: "{{ service_path }}"
      become: true
      loop: "{{ lego_host_certificates }}"
      register: cmd_result
      changed_when: cmd_result.stderr | regex_search('Server responded with a certificate.')
40
playbooks/roles/lego/tasks/systemd.yml
Normal file
@ -0,0 +1,40 @@
---
- name: Copy the system service
  ansible.builtin.template:
    src: lego@.service.j2
    dest: /etc/systemd/system/lego@.service
    mode: "0644"
  become: true
- name: Copy the system timer
  ansible.builtin.copy:
    src: lego@.timer
    dest: /etc/systemd/system/lego@.timer
    mode: "0644"
  become: true

- name: Get all lego timers
  ansible.builtin.shell:
    cmd: "set -o pipefail && systemctl list-timers 'lego@*' --all --output=json | jq -r '.[].unit'"
  register: systemd_timers_result
  changed_when: false

- name: Generate systemd timer names
  ansible.builtin.set_fact:
    lego_systemd_timers: "{{ lego_host_certificates | list_prefix_suffix('lego@', '.timer') }}"

- name: Disable unused system timers
  ansible.builtin.systemd_service:
    name: "{{ item }}"
    state: stopped
    enabled: false
  loop: "{{ systemd_timers_result.stdout_lines | difference(lego_systemd_timers) }}"
  become: true

- name: Enable the system timers
  ansible.builtin.systemd_service:
    name: "{{ item }}"
    state: started
    enabled: true
    daemon_reload: true
  loop: "{{ lego_systemd_timers }}"
  become: true
4
playbooks/roles/lego/templates/lego@.service.j2
Normal file
@ -0,0 +1,4 @@
[Service]
Type=oneshot
ExecStart={{ service_path }}/lego.sh %i
WorkingDirectory={{ service_path }}
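systemd.yml above generates one timer per certificate, named lego@<domain>.timer, so renewal for a single domain can also be driven by hand (domain taken from the hooks shipped with this role, as an example):

    systemctl enable --now 'lego@db.serguzim.me.timer'   # daily at ~01:15, plus up to 2h jitter
    systemctl start 'lego@db.serguzim.me.service'        # one-shot renewal right now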
32
playbooks/roles/lego/vars/main.yml
Normal file
@ -0,0 +1,32 @@
---
lego_host_certificates: "{{ all_services | my_service_attributes(inventory_hostname, 'certificates') }}"

lego_env:
  ACME_DNS_API_BASE: https://{{ acme_dns.host }}
  ACME_DNS_STORAGE_PATH: /config/acme-dns-accounts.json

  LEGO_EMAIL: "{{ admin_email }}"
  LEGO_PATH: /data

  CERTIFICATES_PATH: "{{ certificates_path }}"

lego_compose:
  watchtower: false
  network: false
  image: goacme/lego
  volumes:
    - ./config:/config:ro
    - "{{ certificates_path }}:/certificates"
    - data:/data
  file:
    services:
      app:
        restart: never
        network_mode: "host"
        entrypoint:
          - /lego
          - --accept-tos
          - --email={{ admin_email }}
          - --dns=acme-dns
    volumes:
      data:
12
playbooks/roles/linkwarden/tasks/main.yml
Normal file
@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ linkwarden_svc }}"
    env: "{{ linkwarden_env }}"
    compose: "{{ linkwarden_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml
38
playbooks/roles/linkwarden/vars/main.yml
Normal file
@ -0,0 +1,38 @@
---
linkwarden_secret: "{{ vault_linkwarden.secret }}"

linkwarden_db_host_port: "{{ postgres.host }}:{{ postgres.port }}"
linkwarden_db_user: "{{ opentofu.postgresql_data.linkwarden.user }}"
linkwarden_db_pass: "{{ opentofu.postgresql_data.linkwarden.pass }}"
linkwarden_db_database: "{{ opentofu.postgresql_data.linkwarden.database }}"

linkwarden_s3_accesskey: "{{ opentofu.scaleway_data.linkwarden.access_key }}"
linkwarden_s3_secretkey: "{{ opentofu.scaleway_data.linkwarden.secret_key }}"

linkwarden_svc:
  domain: bookmarks.serguzim.me
  port: 3000

linkwarden_env:
  NEXTAUTH_SECRET: "{{ linkwarden_secret }}"
  NEXTAUTH_URL: https://bookmarks.serguzim.me/api/v1/auth
  DATABASE_URL: postgres://{{ linkwarden_db_user }}:{{ linkwarden_db_pass }}@{{ linkwarden_db_host_port }}/{{ linkwarden_db_database }}

  SPACES_KEY: "{{ linkwarden_s3_accesskey }}"
  SPACES_SECRET: "{{ linkwarden_s3_secretkey }}"
  SPACES_ENDPOINT: "{{ opentofu.scaleway_data.linkwarden.api_endpoint }}"
  SPACES_BUCKET_NAME: "{{ opentofu.scaleway_data.linkwarden.name }}"
  SPACES_REGION: "{{ opentofu.scaleway_data.linkwarden.region }}"
  SPACES_FORCE_PATH_STYLE: false

  NEXT_PUBLIC_DISABLE_REGISTRATION: true
  NEXT_PUBLIC_CREDENTIALS_ENABLED: false
  NEXT_PUBLIC_AUTHENTIK_ENABLED: true
  AUTHENTIK_CUSTOM_NAME: auth.serguzim.me
  AUTHENTIK_ISSUER: "{{ opentofu.authentik_data.linkwarden.base_url }}"
  AUTHENTIK_CLIENT_ID: "{{ opentofu.authentik_data.linkwarden.client_id }}"
  AUTHENTIK_CLIENT_SECRET: "{{ opentofu.authentik_data.linkwarden.client_secret }}"

linkwarden_compose:
  watchtower: true
  image: ghcr.io/linkwarden/linkwarden:latest
10
playbooks/roles/mailcowdockerized/tasks/main.yml
Normal file
@ -0,0 +1,10 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ mailcowdockerized_svc }}"
  block:
    - name: Import tasks to template the site for the reverse proxy
      ansible.builtin.import_tasks: tasks/steps/template-site-config.yml
6
playbooks/roles/mailcowdockerized/vars/main.yml
Normal file
@ -0,0 +1,6 @@
---
mailcowdockerized_svc:
  domain: mail.serguzim.me
  docker_host: host.docker.internal
  port: 3004
  additional_domains: "{{ ['autodiscover', 'autoconfig'] | product(vault_mailcowdockerized.domains) | map('join', '.') }}"
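The additional_domains expression is a cartesian product followed by a join. With vault_mailcowdockerized.domains set to, say, ['example.com'] (a hypothetical value; the real list lives in the vault), it evaluates step by step to:

    ['autodiscover', 'autoconfig'] | product(['example.com'])
        -> [('autodiscover', 'example.com'), ('autoconfig', 'example.com')]
    ... | map('join', '.')
        -> ['autodiscover.example.com', 'autoconfig.example.com']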
11
playbooks/roles/minecraft_2/tasks/main.yml
Normal file
@ -0,0 +1,11 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    env: "{{ minecraft_2_env }}"
    compose: "{{ minecraft_2_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml
65
playbooks/roles/minecraft_2/vars/main.yml
Normal file
@ -0,0 +1,65 @@
---
minecraft_2_env:
  ALLOW_FLIGHT: true
  ALLOW_NETHER: true
  ANNOUNCE_PLAYER_ACHIEVEMENTS: true
  BROADCAST_CONSOLE_TO_OPS: true
  BROADCAST_RCON_TO_OPS: true
  CONSOLE: false
  ENABLE_AUTOPAUSE: true
  ENABLE_COMMAND_BLOCK: true
  ENABLE_JMX: false
  ENABLE_RCON: true
  ENABLE_STATUS: true
  ENABLE_WHITELIST: true
  ENFORCE_WHITELIST: true
  ENTITY_BROADCAST_RANGE_PERCENTAGE: 100
  EULA: true
  FORCE_GAMEMODE: false
  FUNCTION_PERMISSION_LEVEL: 2
  GENERATE_STRUCTURES: true
  HARDCORE: false
  ICON:
  LEVEL_TYPE: DEFAULT
  MAX_BUILD_HEIGHT: 512
  MAX_MEMORY: 4G
  MAX_TICK_TIME: -1
  MAX_PLAYERS: 64
  MAX_WORLD_SIZE: 30000000
  MODE: survival
  MOTD:
  NETWORK_COMPRESSION_THRESHOLD: 256
  PVP: true
  SERVER_NAME: minecraft.serguzim.me
  SNOOPER_ENABLED: false
  SPAWN_ANIMALS: true
  SPAWN_MONSTERS: true
  SPAWN_NPCS: true
  SPAWN_PROTECTION: 0
  SYNC_CHUNK_WRITES: true
  TYPE: PAPER
  ONLINE_MODE: true
  OP_PERMISSION_LEVEL: 4
  OPS: "{{ vault_minecraft_2.ops }}"
  OVERRIDE_ICON: true
  OVERRIDE_SERVER_PROPERTIES: true
  PLAYER_IDLE_TIMEOUT: 0
  PREVENT_PROXY_CONNECTIONS: false
  SEED: "{{ vault_minecraft_2.seed }}"
  USE_NATIVE_TRANSPORT: true
  VERSION: LATEST
  VIEW_DISTANCE: 10
  WHITELIST: "{{ vault_minecraft_2.whitelist }}"

minecraft_2_compose:
  watchtower: false
  image: itzg/minecraft-server
  volumes:
    - data:/data
  file:
    services:
      app:
        ports:
          - 25565:25565
    volumes:
      data:
12
playbooks/roles/minio/tasks/main.yml
Normal file
@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ minio_svc }}"
    env: "{{ minio_env }}"
    compose: "{{ minio_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml
41
playbooks/roles/minio/vars/main.yml
Normal file
@ -0,0 +1,41 @@
---
minio_svc:
  domain: s3.serguzim.me
  port: 9000
  caddy_extra: |
    @nocache {
      query nocache=*
    }
    header @nocache "Cache-Control" "no-store, no-cache"
  extra_svcs:
    - domain: console.s3.serguzim.me
      docker_host: minio
      port: 9001

minio_env:
  MINIO_SERVER_URL: https://{{ svc.domain }}/
  MINIO_BROWSER_REDIRECT_URL: https://console.{{ svc.domain }}
  MINIO_VOLUMES: /data

  MINIO_ROOT_USER: "{{ vault_minio.user }}"
  MINIO_ROOT_PASSWORD: "{{ vault_minio.pass }}"

  MINIO_IDENTITY_OPENID_CONFIG_URL: "{{ (opentofu.authentik_data.minio.base_url, '.well-known/openid-configuration') | path_join }}"
  MINIO_IDENTITY_OPENID_CLIENT_ID: "{{ opentofu.authentik_data.minio.client_id }}"
  MINIO_IDENTITY_OPENID_CLIENT_SECRET: "{{ opentofu.authentik_data.minio.client_secret }}"
  MINIO_IDENTITY_OPENID_CLAIM_NAME: minio_policy
  MINIO_IDENTITY_OPENID_DISPLAY_NAME: auth.serguzim.me
  MINIO_IDENTITY_OPENID_SCOPES: openid,email,profile,minio


minio_compose:
  watchtower: true
  image: minio/minio
  volumes:
    - data:/data
  file:
    services:
      app:
        command: server --console-address ":9001"
    volumes:
      data:
12
playbooks/roles/ntfy/tasks/main.yml
Normal file
@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ ntfy_svc }}"
    compose: "{{ ntfy_compose }}"
    env: "{{ ntfy_env }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml
54
playbooks/roles/ntfy/vars/main.yml
Normal file
@ -0,0 +1,54 @@
---
ntfy_svc:
  domain: push.serguzim.me
  port: 80

ntfy_env:
  TZ: "{{ timezone }}"

  NTFY_BASE_URL: "https://{{ ntfy_svc.domain }}"

  NTFY_CACHE_FILE: /var/cache/ntfy/cache.db
  NTFY_CACHE_DURATION: "12h"

  NTFY_BEHIND_PROXY: true

  NTFY_AUTH_FILE: /var/lib/ntfy/user.db
  NTFY_AUTH_DEFAULT_ACCESS: "deny-all"

  NTFY_ATTACHMENT_CACHE_DIR: "/var/cache/ntfy/attachments"
  NTFY_ATTACHMENT_TOTAL_SIZE_LIMIT: "5G"
  NTFY_ATTACHMENT_FILE_SIZE_LIMIT: "15M"
  NTFY_ATTACHMENT_EXPIRY_DURATION: "3h"

  NTFY_KEEPALIVE_INTERVAL: "45s"
  NTFY_MANAGER_INTERVAL: "60m"

  NTFY_ENABLE_SIGNUP: false
  NTFY_ENABLE_LOGIN: true
  NTFY_ENABLE_RESERVATIONS: true

  NTFY_GLOBAL_TOPIC_LIMIT: 15000

  NTFY_VISITOR_SUBSCRIPTION_LIMIT: 30
  NTFY_VISITOR_REQUEST_LIMIT_BURST: 60
  NTFY_VISITOR_REQUEST_LIMIT_REPLENISH: "5s"
  NTFY_VISITOR_ATTACHMENT_TOTAL_SIZE_LIMIT: "100M"
  NTFY_VISITOR_ATTACHMENT_DAILY_BANDWIDTH_LIMIT: "500M"

  NTFY_ENABLE_METRICS: true

ntfy_compose:
  watchtower: true
  image: binwiederhier/ntfy
  volumes:
    - cache:/var/cache/ntfy
    - data:/var/lib/ntfy
  file:
    services:
      app:
        command:
          - serve
    volumes:
      cache:
      data:
0
playbooks/roles/postgresql/.gitkeep
Normal file
26
playbooks/roles/reitanlage_oranienburg/tasks/main.yml
Normal file
@ -0,0 +1,26 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ reitanlage_oranienburg_svc }}"
    compose: "{{ reitanlage_oranienburg_compose }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Get the Dockerfile
      ansible.builtin.get_url:
        url: https://raw.githubusercontent.com/getgrav/docker-grav/master/Dockerfile
        dest: "{{ (service_path, 'Dockerfile') | path_join }}"
        mode: "0644"
      register: cmd_result

    - name: Set the docker rebuild flag
      ansible.builtin.set_fact:
        docker_rebuild: true
      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.

    - name: Import start tasks for common service
      ansible.builtin.import_tasks: tasks/start-common-service.yml
|
24
playbooks/roles/reitanlage_oranienburg/vars/main.yml
Normal file
24
playbooks/roles/reitanlage_oranienburg/vars/main.yml
Normal file
|
@ -0,0 +1,24 @@
|
|||
---
|
||||
reitanlage_oranienburg_svc:
|
||||
domain: reitanlage-oranienburg.de
|
||||
www_domain: true
|
||||
port: 80
|
||||
caddy_extra: |
|
||||
import analytics
|
||||
|
||||
header /images/* Cache-Control "max-age=31536000"
|
||||
header /assets/* Cache-Control "max-age=2629800"
|
||||
header /user/themes/* Cache-Control "max-age=2629800"
|
||||
|
||||
reitanlage_oranienburg_compose:
|
||||
watchtower: false
|
||||
image: "{{ (container_registry.public, 'library/grav') | path_join }}"
|
||||
volumes:
|
||||
- data:/var/www/html/
|
||||
file:
|
||||
services:
|
||||
app:
|
||||
build:
|
||||
context: .
|
||||
volumes:
|
||||
data:
|
12
playbooks/roles/shlink/tasks/main.yml
Normal file
@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ shlink_svc }}"
    env: "{{ shlink_env }}"
    compose: "{{ shlink_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml
|
30
playbooks/roles/shlink/vars/main.yml
Normal file
30
playbooks/roles/shlink/vars/main.yml
Normal file
|
@ -0,0 +1,30 @@
|
|||
---
|
||||
shlink_svc:
|
||||
domain: msrg.cc
|
||||
additional_domains:
|
||||
- "emgauwa.app"
|
||||
port: 8080
|
||||
|
||||
shlink_env:
|
||||
DEFAULT_DOMAIN: "{{ shlink_svc.domain }}"
|
||||
IS_HTTPS_ENABLED: true
|
||||
TIMEZONE: "{{ timezone }}"
|
||||
|
||||
DEFAULT_SHORT_CODES_LENGTH: 8
|
||||
MULTI_SEGMENT_SLUGS_ENABLED: false
|
||||
SHORT_URL_TRAILING_SLASH: true
|
||||
REDIRECT_APPEND_EXTRA_PATH: true
|
||||
DEFAULT_BASE_URL_REDIRECT: "https://www.serguzim.me/"
|
||||
|
||||
DB_DRIVER: postgres
|
||||
DB_HOST: "{{ postgres.host }}"
|
||||
DB_PORT: "{{ postgres.port }}"
|
||||
DB_NAME: "{{ opentofu.postgresql_data.shlink.database }}"
|
||||
DB_USER: "{{ opentofu.postgresql_data.shlink.user }}"
|
||||
DB_PASSWORD: "{{ opentofu.postgresql_data.shlink.pass }}"
|
||||
|
||||
GEOLITE_LICENSE_KEY: "{{ vault_shlink.geolite_key }}"
|
||||
|
||||
shlink_compose:
|
||||
watchtower: true
|
||||
image: shlinkio/shlink
|
48
playbooks/roles/software/tasks/docker.yml
Normal file
@ -0,0 +1,48 @@
- name: Install required system packages
  ansible.builtin.apt:
    pkg:
      - apt-transport-https
      - ca-certificates
      - curl
      - software-properties-common
      - python3-pip
      - virtualenv
      - python3-setuptools
    state: present
    update_cache: true
  become: true

- name: Add Docker GPG apt Key
  ansible.builtin.apt_key:
    url: https://download.docker.com/linux/ubuntu/gpg
    state: present
  become: true

- name: Add Docker Repository
  ansible.builtin.apt_repository:
    repo: deb https://download.docker.com/linux/ubuntu focal stable
    state: present
  become: true

- name: Update apt and install docker packages
  ansible.builtin.apt:
    pkg:
      - docker-ce
      - docker-ce-cli
      - containerd.io
      - docker-buildx-plugin
      - docker-compose-plugin
    state: present
    update_cache: true
  become: true

- name: Add user to the Docker group
  ansible.builtin.user:
    name: "{{ ansible_user }}"
    groups: docker
    append: true
  become: true

- name: Create a network
  community.docker.docker_network:
    name: apps
42
playbooks/roles/software/tasks/main.yml
Normal file
@ -0,0 +1,42 @@
- name: Install aptitude
  ansible.builtin.apt:
    name: aptitude
    state: present
    update_cache: true
  become: true

- name: Install docker
  ansible.builtin.import_tasks: docker.yml

- name: Install jq and bzip2
  ansible.builtin.apt:
    pkg:
      - jq
      - bzip2
    state: present
    update_cache: true
  become: true

- name: Check if autorestic is installed
  ansible.builtin.stat:
    path: /usr/local/bin/autorestic
  register: autorestic_status

- name: Install autorestic
  ansible.builtin.shell: set -o pipefail && wget -qO - https://raw.githubusercontent.com/cupcakearmy/autorestic/master/install.sh | bash
  args:
    executable: /bin/bash
  when: not autorestic_status.stat.exists
  changed_when: true
  become: true

- name: Check if restic is installed
  ansible.builtin.stat:
    path: /usr/local/bin/restic
  register: restic_status

- name: Install restic
  ansible.builtin.command: autorestic install
  when: not restic_status.stat.exists
  changed_when: true
  become: true
22
playbooks/roles/synapse/files/msrg.cc.log.config
Normal file
@ -0,0 +1,22 @@
version: 1

formatters:
  precise:
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'

handlers:
  console:
    class: logging.StreamHandler
    formatter: precise

loggers:
  synapse.storage.SQL:
    # beware: increasing this to DEBUG will make synapse log sensitive
    # information such as access tokens.
    level: INFO

root:
  level: INFO
  handlers: [console]

disable_existing_loggers: false
44
playbooks/roles/synapse/tasks/main.yml
Normal file
@ -0,0 +1,44 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ synapse_svc }}"
    env: "{{ synapse_env }}"
    compose: "{{ synapse_compose }}"
    yml: "{{ synapse_yml }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Set synapse config path
      ansible.builtin.set_fact:
        config_path: "{{ (service_path, svc.config_path) | path_join }}"

    - name: Create config directory
      ansible.builtin.file:
        path: "{{ config_path }}"
        state: directory
        mode: "0755"

    - name: Template config
      ansible.builtin.template:
        src: yml.j2
        dest: "{{ (config_path, 'homeserver.yaml') | path_join }}"
        mode: "0644"

    - name: Copy the log config
      ansible.builtin.copy:
        src: msrg.cc.log.config
        dest: "{{ (config_path, 'msrg.cc.log.config') | path_join }}"
        mode: "0644"

    - name: Copy the signing key
      ansible.builtin.copy:
        content: "{{ vault_synapse.signing_key }}"
        dest: "{{ (config_path, 'msrg.cc.signing.key') | path_join }}"
        mode: "0644"

    - name: Import start tasks for common service
      ansible.builtin.import_tasks: tasks/start-common-service.yml
|
122
playbooks/roles/synapse/vars/main.yml
Normal file
122
playbooks/roles/synapse/vars/main.yml
Normal file
|
@ -0,0 +1,122 @@
|
|||
---
|
||||
synapse_svc:
|
||||
domain: matrix.serguzim.me
|
||||
docker_host: synapse-admin
|
||||
port: 80
|
||||
caddy_extra: |
|
||||
handle /_matrix/* {
|
||||
reverse_proxy synapse:8008
|
||||
}
|
||||
handle /_synapse/* {
|
||||
reverse_proxy synapse:8008
|
||||
}
|
||||
extra_svcs:
|
||||
- domain: matrix.serguzim.me:8448
|
||||
additional_domains:
|
||||
- serguzim.me:8448
|
||||
docker_host: synapse
|
||||
port: 8008
|
||||
db:
|
||||
host: "{{ postgres.host }}"
|
||||
user: "{{ opentofu.postgresql_data.synapse.user }}"
|
||||
pass: "{{ opentofu.postgresql_data.synapse.pass }}"
|
||||
database: "{{ opentofu.postgresql_data.synapse.database }}"
|
||||
config_path: config
|
||||
|
||||
synapse_env:
|
||||
SYNAPSE_CONFIG_PATH: "{{ ('/', svc.config_path) | path_join }}"
|
||||
REACT_APP_SERVER: https://matrix.serguzim.me
|
||||
|
||||
synapse_yml:
|
||||
server_name: msrg.cc
|
||||
pid_file: "{{ (svc.config_path, 'homeserver.pid') | path_join }}"
|
||||
public_baseurl: https://matrix.serguzim.me/
|
||||
allow_public_rooms_without_auth: true
|
||||
allow_public_rooms_over_federation: true
|
||||
|
||||
listeners:
|
||||
- port: 8008
|
||||
tls: false
|
||||
type: http
|
||||
x_forwarded: true
|
||||
resources:
|
||||
- names:
|
||||
- client
|
||||
- federation
|
||||
- metrics
|
||||
compress: false
|
||||
|
||||
admin_contact: mailto:{{ admin_email }}
|
||||
|
||||
acme:
|
||||
enabled: false
|
||||
|
||||
database:
|
||||
name: psycopg2
|
||||
args:
|
||||
user: "{{ svc.db.user }}"
|
||||
password: "{{ svc.db.pass }}"
|
||||
database: "{{ svc.db.database }}"
|
||||
host: "{{ svc.db.host }}"
|
||||
cp_min: 5
|
||||
cp_max: 10
|
||||
|
||||
log_config: "{{ (svc.config_path, 'msrg.cc.log.config') | path_join }}"
|
||||
media_store_path: /media_store
|
||||
max_upload_size: 500M
|
||||
enable_registration: false
|
||||
enable_metrics: true
|
||||
report_stats: true
|
||||
|
||||
macaroon_secret_key: "{{ vault_synapse.macaroon_secret_key }}"
|
||||
form_secret: "{{ vault_synapse.form_secret }}"
|
||||
signing_key_path: "{{ (svc.config_path, 'msrg.cc.signing.key') | path_join }}"
|
||||
|
||||
trusted_key_servers:
|
||||
- server_name: matrix.org
|
||||
suppress_key_server_warning: true
|
||||
|
||||
oidc_providers:
|
||||
- idp_id: auth_serguzim_me
|
||||
idp_name: auth.serguzim.me
|
||||
issuer: "{{ opentofu.authentik_data.synapse.base_url }}"
|
||||
client_id: "{{ opentofu.authentik_data.synapse.client_id }}"
|
||||
client_secret: "{{ opentofu.authentik_data.synapse.client_secret }}"
|
||||
scopes:
|
||||
- openid
|
||||
- profile
|
||||
- email
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ '{{ user.preferred_username }}' }}"
|
||||
display_name_template: "{{ '{{ user.name }}' }}"
|
||||
|
||||
email:
|
||||
smtp_host: mail.serguzim.me
|
||||
smtp_port: 587
|
||||
smtp_user: matrix@serguzim.me
|
||||
smtp_pass: "{{ vault_synapse.mail.pass }}"
|
||||
require_transport_security: true
|
||||
notif_from: Matrix <matrix@serguzim.me>
|
||||
|
||||
synapse_compose:
|
||||
watchtower: true
|
||||
image: ghcr.io/element-hq/synapse:latest
|
||||
volumes:
|
||||
- ./config:/config
|
||||
- media_store:/media_store
|
||||
file:
|
||||
services:
|
||||
synapse-admin:
|
||||
image: awesometechnologies/synapse-admin
|
||||
restart: always
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: true
|
||||
env_file:
|
||||
- service.env
|
||||
networks:
|
||||
apps:
|
||||
aliases:
|
||||
- synapse-admin
|
||||
volumes:
|
||||
media_store:
|
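One templating detail in synapse_yml: localpart_template and display_name_template wrap the inner braces in a Jinja string literal, so Ansible renders them as literal text instead of trying to resolve user.preferred_username itself. The generated homeserver.yaml therefore contains exactly the placeholders Synapse's own template engine expects:

    localpart_template: "{{ user.preferred_username }}"
    display_name_template: "{{ user.name }}"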
12
playbooks/roles/tandoor/tasks/main.yml
Normal file
@ -0,0 +1,12 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ tandoor_svc }}"
    env: "{{ tandoor_env }}"
    compose: "{{ tandoor_compose }}"
  block:
    - name: Import tasks to deploy common service
      ansible.builtin.import_tasks: tasks/deploy-common-service.yml
|
62
playbooks/roles/tandoor/vars/main.yml
Normal file
62
playbooks/roles/tandoor/vars/main.yml
Normal file
|
@ -0,0 +1,62 @@
|
|||
---
|
||||
tandoor_svc:
|
||||
domain: recipes.serguzim.me
|
||||
port: 80
|
||||
db:
|
||||
host: "{{ postgres.host }}"
|
||||
port: "{{ postgres.port }}"
|
||||
user: "{{ opentofu.postgresql_data.tandoor.user }}"
|
||||
pass: "{{ opentofu.postgresql_data.tandoor.pass }}"
|
||||
database: "{{ opentofu.postgresql_data.tandoor.database }}"
|
||||
|
||||
tandoor_env:
|
||||
DEBUG: 0
|
||||
SQL_DEBUG: 0
|
||||
|
||||
ALLOWED_HOSTS: recipes.serguzim.me
|
||||
SECRET_KEY: "{{ vault_tandoor.secret_key }}"
|
||||
TZ: "{{ timezone }}"
|
||||
|
||||
DB_ENGINE: django.db.backends.postgresql
|
||||
DB_OPTIONS: '{"sslmode": "require"}'
|
||||
POSTGRES_HOST: "{{ svc.db.host }}"
|
||||
POSTGRES_PORT: "{{ svc.db.port }}"
|
||||
POSTGRES_DB: "{{ svc.db.database }}"
|
||||
POSTGRES_USER: "{{ svc.db.user }}"
|
||||
POSTGRES_PASSWORD: "{{ svc.db.pass }}"
|
||||
|
||||
SHOPPING_MIN_AUTOSYNC_INTERVAL: 5
|
||||
|
||||
ENABLE_SIGNUP: 0
|
||||
ENABLE_METRICS: 1
|
||||
ENABLE_PDF_EXPORT: 1
|
||||
|
||||
SOCIAL_DEFAULT_ACCESS: 1
|
||||
SOCIAL_DEFAULT_GROUP: guest
|
||||
|
||||
tandoor_compose:
|
||||
watchtower: true
|
||||
image: nginx:mainline-alpine
|
||||
volumes:
|
||||
- nginx_config:/etc/nginx/conf.d:ro
|
||||
- staticfiles:/static
|
||||
- mediafiles:/media
|
||||
file:
|
||||
services:
|
||||
web_recipes:
|
||||
image: vabene1111/recipes
|
||||
restart: always
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: true
|
||||
env_file:
|
||||
- service.env
|
||||
volumes:
|
||||
- staticfiles:/opt/recipes/staticfiles
|
||||
- nginx_config:/opt/recipes/nginx/conf.d
|
||||
- mediafiles:/opt/recipes/mediafiles
|
||||
networks:
|
||||
default:
|
||||
volumes:
|
||||
nginx_config:
|
||||
staticfiles:
|
||||
mediafiles:
|
4
playbooks/roles/teamspeak_fallback/files/teamspeak-fallback-db
Executable file
@ -0,0 +1,4 @@
#!/usr/bin/env sh

chown -R "${TEAMSPEAK_USER}:${TEAMSPEAK_GROUP}" /mnt/teamspeak_fallback_data
install -o "${TEAMSPEAK_USER}" -g "${TEAMSPEAK_GROUP}" -m 644 "$WEBHOOK_DATA" "/mnt/teamspeak_fallback_data/ts3server.sqlitedb"
64
playbooks/roles/teamspeak_fallback/tasks/main.yml
Normal file
@ -0,0 +1,64 @@
---
- name: Set common facts
  ansible.builtin.import_tasks: tasks/set-default-facts.yml

- name: Deploy {{ role_name }}
  vars:
    svc: "{{ teamspeak_fallback_svc }}"
    compose: "{{ teamspeak_fallback_compose }}"
    env: "{{ teamspeak_fallback_env }}"
    yml: "{{ teamspeak_fallback_yml }}"
  block:
    - name: Import prepare tasks for common service
      ansible.builtin.import_tasks: tasks/prepare-common-service.yml

    - name: Template the conditional-start script
      ansible.builtin.template:
        src: conditional-start.sh.j2
        dest: "{{ (service_path, 'conditional-start.sh') | path_join }}"
        mode: "0755"

    - name: Set webhook config path
      ansible.builtin.set_fact:
        config_path: "{{ (service_path, 'config') | path_join }}"

    - name: Create config directory
      ansible.builtin.file:
        path: "{{ config_path }}"
        state: directory
        mode: "0755"

    - name: Template main config
      ansible.builtin.template:
        src: yml.j2
        dest: "{{ (config_path, 'hooks.yml') | path_join }}"
        mode: "0644"
      register: cmd_result

    - name: Set the docker force-recreate flag
      ansible.builtin.set_fact:
        docker_force_recreate: --force-recreate
      when: cmd_result.changed # noqa: no-handler We need to handle the restart per service. Handlers don't support variables.

    - name: Copy the teamspeak-fallback-db script
      ansible.builtin.copy:
        src: teamspeak-fallback-db
        dest: "{{ (config_path, 'teamspeak-fallback-db') | path_join }}"
        mode: "0755"

    - name: Import start tasks for common service
      ansible.builtin.import_tasks: tasks/start-common-service.yml

    - name: Copy the system service
      ansible.builtin.template:
        src: teamspeak-fallback.service.j2
        dest: /etc/systemd/system/teamspeak-fallback.service
        mode: "0644"
      become: true
    - name: Enable the system service
      ansible.builtin.systemd_service:
        name: teamspeak-fallback.service
        state: started
        enabled: true
        daemon_reload: true
      become: true
@ -0,0 +1,18 @@
#!/usr/bin/env sh

while true
do
    if nc -z -w 3 "{{ teamspeak_fallback_check_server }}" "{{ teamspeak_fallback_check_port }}"
    then
        if docker compose ps --services | grep teamspeak >/dev/null; then
            echo "Stopping Server"
            docker compose down teamspeak
        fi
    else
        if ! docker compose ps --services | grep teamspeak >/dev/null; then
            echo "Starting Server"
            docker compose up -d --pull=always teamspeak
        fi
    fi
    sleep 2
done
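The template above is a small failover watchdog: while the primary server at teamspeak_fallback_check_server:teamspeak_fallback_check_port answers a TCP probe, the local fallback container is kept down; as soon as the probe fails, it is started with a fresh image pull. The probe can be tested by hand (host and port are hypothetical placeholders here):

    nc -z -w 3 ts.example.com 10011 && echo primary up || echo primary down

Together with Restart=on-failure in the unit below, the two-second loop keeps the failover decision quick without any external scheduler.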
@ -0,0 +1,12 @@
[Unit]
Description=Teamspeak Fallback Starter
After=network.target

[Service]
Type=simple
ExecStart={{ service_path }}/conditional-start.sh
WorkingDirectory={{ service_path }}
Restart=on-failure

[Install]
WantedBy=multi-user.target
Some files were not shown because too many files have changed in this diff.