Refactor services-to-host-mapping and playbooks
This commit is contained in:
parent
825393bbd3
commit
a8e14b53f5
9 changed files with 142 additions and 108 deletions
58
inventory/group_vars/all/all_services.yml
Normal file
58
inventory/group_vars/all/all_services.yml
Normal file
|
@ -0,0 +1,58 @@
|
|||
all_services:
|
||||
- name: acme_dns
|
||||
- name: authentik
|
||||
- name: extra_services
|
||||
- name: faas
|
||||
- name: forgejo
|
||||
volumes_backup:
|
||||
- forgejo_data
|
||||
- name: forgejo_runner
|
||||
- name: healthcheck
|
||||
- name: homebox
|
||||
volumes_backup:
|
||||
- homebox_data
|
||||
- name: immich
|
||||
volumes_backup:
|
||||
- immich_upload
|
||||
- name: influxdb
|
||||
volumes_backup:
|
||||
- influxdb_data
|
||||
- name: jellyfin
|
||||
volumes_backup:
|
||||
- jellyfin_config
|
||||
#- jellyfin_media # TODO
|
||||
- name: linkwarden
|
||||
- name: mailcow
|
||||
- name: minio
|
||||
volumes_backup:
|
||||
- minio_data
|
||||
- name: ntfy
|
||||
volumes_backup:
|
||||
- ntfy_data
|
||||
- name: reitanlage_oranienburg
|
||||
volumes_backup:
|
||||
- reitanlage-oranienburg_data
|
||||
- name: shlink
|
||||
- name: synapse
|
||||
volumes_backup:
|
||||
- synapse_media_store
|
||||
ports:
|
||||
- 8448:8448
|
||||
- name: tandoor
|
||||
volumes_backup:
|
||||
- tandoor_mediafiles
|
||||
- name: teamspeak_fallback
|
||||
volumes_backup:
|
||||
- teamspeak-fallback-data
|
||||
- name: telegraf
|
||||
- name: tinytinyrss
|
||||
- name: umami
|
||||
- name: uptime_kuma
|
||||
volumes_backup:
|
||||
- uptime-kuma_data
|
||||
- name: vikunja
|
||||
volumes_backup:
|
||||
- vikunja_data
|
||||
- name: webhook
|
||||
- name: wiki_js
|
||||
- name: woodpecker
|
|
@ -1,4 +1,9 @@
|
|||
all:
|
||||
children:
|
||||
serguzim_net:
|
||||
hosts:
|
||||
node002:
|
||||
node003:
|
||||
hosts:
|
||||
local-dev:
|
||||
ansible_connection: local
|
||||
|
@ -29,20 +34,31 @@ all:
|
|||
hc_uid: "{{ vault_node002.backup.hc_uid }}"
|
||||
uptime_kuma_token: "{{ vault_node002.backup.uptime_kuma_token }}"
|
||||
volumes:
|
||||
- forgejo_data
|
||||
- homebox_data
|
||||
- immich_upload
|
||||
- influxdb_data
|
||||
- jellyfin_config
|
||||
#- jellyfin_media # TODO
|
||||
- minio_data
|
||||
- ntfy_data
|
||||
- reitanlage-oranienburg_data
|
||||
- synapse_media_store
|
||||
- tandoor_mediafiles
|
||||
- teamspeak-fallback-data
|
||||
- uptime-kuma_data
|
||||
- vikunja_data
|
||||
host_services:
|
||||
- authentik
|
||||
- extra_services
|
||||
- faas
|
||||
- forgejo
|
||||
- forgejo_runner
|
||||
- healthcheck
|
||||
- homebox
|
||||
- immich
|
||||
- influxdb
|
||||
- jellyfin
|
||||
- minio
|
||||
- ntfy
|
||||
- reitanlage_oranienburg
|
||||
- shlink
|
||||
- synapse
|
||||
- tandoor
|
||||
- teamspeak_fallback
|
||||
- telegraf
|
||||
- tinytinyrss
|
||||
- uptime_kuma
|
||||
- vikunja
|
||||
- watchtower
|
||||
- webhook
|
||||
- woodpecker
|
||||
|
||||
node003:
|
||||
ansible_host: node003.vpn.serguzim.net
|
||||
|
@ -56,3 +72,9 @@ all:
|
|||
hc_uid: "{{ vault_node003.backup.hc_uid }}"
|
||||
uptime_kuma_token: "{{ vault_node003.backup.uptime_kuma_token }}"
|
||||
volumes: []
|
||||
host_services:
|
||||
- acme_dns
|
||||
- linkwarden
|
||||
- mailcow
|
||||
- umami
|
||||
- wiki_js
|
||||
|
|
17
playbooks/filter_plugins/my_service_attributes.py
Normal file
17
playbooks/filter_plugins/my_service_attributes.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
class FilterModule(object):
    """Ansible filter plugin providing the ``my_service_attributes`` filter.

    Used by the playbooks/role defaults to project the global ``all_services``
    list onto a host's ``host_services`` selection, e.g.::

        all_services | my_service_attributes(host_services, 'volumes_backup')
    """

    def filters(self):
        """Ansible plugin hook: map filter names to their implementations."""
        return {
            'my_service_attributes': self.my_service_attributes,
        }

    def my_service_attributes(self, services, my_services, attribute="name"):
        """Collect ``attribute`` values from the selected services.

        :param services: list of service dicts, each with at least a ``name`` key
        :param my_services: container of service names to include
        :param attribute: key to extract from each matching service
            (default ``"name"``)
        :return: flat list of values; list-valued attributes are flattened
            (extend) while scalars are appended. Services missing the key
            are skipped silently, so hosts without e.g. ``volumes_backup``
            contribute nothing.
        """
        result = []
        for service in services:
            # Guard clauses instead of nested ifs: skip non-selected
            # services and services that lack the requested attribute.
            if service["name"] not in my_services:
                continue
            if attribute not in service:
                continue
            value = service[attribute]
            # isinstance() is the idiomatic type check (was: type(x) == list).
            if isinstance(value, list):
                result.extend(value)
            else:
                result.append(value)

        return result
|
|
@ -1,63 +0,0 @@
|
|||
---
|
||||
- name: Run roles for node002
|
||||
hosts: node002
|
||||
roles:
|
||||
- role: always
|
||||
- role: backup
|
||||
tags: backup
|
||||
- role: lego
|
||||
tags: lego
|
||||
- role: caddy
|
||||
tags: caddy
|
||||
vars:
|
||||
caddy_ports_extra:
|
||||
- 8448:8448
|
||||
- role: watchtower
|
||||
tags: watchtower
|
||||
|
||||
- role: authentik
|
||||
tags: authentik
|
||||
- role: extra_services
|
||||
tags: extra_services
|
||||
- role: faas
|
||||
tags: faas
|
||||
- role: forgejo
|
||||
tags: forgejo
|
||||
- role: forgejo_runner
|
||||
tags: forgejo_runner
|
||||
- role: healthcheck
|
||||
tags: healthcheck
|
||||
- role: homebox
|
||||
tags: homebox
|
||||
- role: immich
|
||||
tags: immich
|
||||
- role: influxdb
|
||||
tags: influxdb
|
||||
- role: jellyfin
|
||||
tags: jellyfin
|
||||
- role: minio
|
||||
tags: minio
|
||||
- role: ntfy
|
||||
tags: ntfy
|
||||
- role: reitanlage_oranienburg
|
||||
tags: reitanlage_oranienburg
|
||||
- role: shlink
|
||||
tags: shlink
|
||||
- role: synapse
|
||||
tags: synapse
|
||||
- role: tandoor
|
||||
tags: tandoor
|
||||
- role: teamspeak_fallback
|
||||
tags: teamspeak_fallback
|
||||
- role: telegraf
|
||||
tags: telegraf
|
||||
- role: tinytinyrss
|
||||
tags: tinytinyrss
|
||||
- role: uptime_kuma
|
||||
tags: uptime_kuma
|
||||
- role: vikunja
|
||||
tags: vikunja
|
||||
- role: webhook
|
||||
tags: webhook
|
||||
- role: woodpecker
|
||||
tags: woodpecker
|
|
@ -1,24 +0,0 @@
|
|||
---
|
||||
- name: Run roles for node003
|
||||
hosts: node003
|
||||
roles:
|
||||
- role: always
|
||||
- role: docker
|
||||
tags: common
|
||||
- role: backup
|
||||
tags: backup
|
||||
- role: caddy
|
||||
tags: caddy
|
||||
- role: watchtower
|
||||
tags: watchtower
|
||||
|
||||
- role: acme_dns
|
||||
tags: acme-dns
|
||||
- role: linkwarden
|
||||
tags: linkwarden
|
||||
- role: mailcow
|
||||
tags: mailcow
|
||||
- role: umami
|
||||
tags: umami
|
||||
- role: wiki_js
|
||||
tags: wiki_js
|
|
@ -1,6 +1,28 @@
|
|||
---
|
||||
- name: Run playbook for node001
|
||||
import_playbook: node001.yml
|
||||
- name: Run all roles
|
||||
hosts: serguzim_net
|
||||
tasks:
|
||||
- name: Include common roles
|
||||
ansible.builtin.include_role:
|
||||
name: "{{ services_item }}"
|
||||
apply:
|
||||
tags: "{{ services_item }}"
|
||||
tags: always
|
||||
loop:
|
||||
- always
|
||||
- backup
|
||||
- lego
|
||||
- caddy
|
||||
- watchtower
|
||||
loop_control:
|
||||
loop_var: services_item
|
||||
|
||||
- name: Run playbook for node002
|
||||
import_playbook: node002.yml
|
||||
- name: Include service roles
|
||||
ansible.builtin.include_role:
|
||||
name: "{{ services_item }}"
|
||||
apply:
|
||||
tags: "{{ services_item }}"
|
||||
tags: always
|
||||
loop: "{{ all_services | my_service_attributes(host_services) }}"
|
||||
loop_control:
|
||||
loop_var: services_item
|
||||
|
|
|
@ -5,7 +5,8 @@ backup_image: "{{ (container_registry.public, 'services/backup') | path_join }}"
|
|||
backup_svc:
|
||||
name: backup
|
||||
|
||||
backup_volumes_service: "{{ host_backup.volumes | map_backup_volumes_service }}"
|
||||
backup_volumes_list: "{{ all_services | my_service_attributes(host_services, 'volumes_backup') }}"
|
||||
backup_volumes_service: "{{ backup_volumes_list | map_backup_volumes_service }}"
|
||||
|
||||
backup_env:
|
||||
HC_UID: "{{ host_backup.hc_uid }}"
|
||||
|
@ -56,4 +57,4 @@ backup_compose:
|
|||
devices:
|
||||
- /dev/fuse
|
||||
|
||||
volumes: "{{ host_backup.volumes | map_backup_volumes }}"
|
||||
volumes: "{{ backup_volumes_list | map_backup_volumes }}"
|
||||
|
|
|
@ -9,6 +9,7 @@ caddy_ports_default:
|
|||
- 443:443
|
||||
- 443:443/udp
|
||||
- "{{ host_vpn.ip }}:2019:2019"
|
||||
caddy_ports_extra: "{{ all_services | my_service_attributes(host_services, 'ports') }}"
|
||||
caddy_ports: "{{ caddy_ports_default | union(caddy_ports_extra) }}"
|
||||
|
||||
caddy_svc:
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
ansible.builtin.template:
|
||||
src: yml.j2
|
||||
dest: "{{ (service_path, 'config.yml') | path_join }}"
|
||||
mode: "0600"
|
||||
mode: "0644"
|
||||
register: cmd_result
|
||||
|
||||
- name: Set the docker force-recreate flag
|
||||
|
|
Loading…
Reference in a new issue