Add prometheus metrics to alloy
This commit is contained in:
parent
616788c5ea
commit
5ad3e9bfe2
8 changed files with 174 additions and 19 deletions
|
@ -1,9 +1,9 @@
|
||||||
output "authentik_data" {
|
output "authentik_data" {
|
||||||
value = {
|
value = {
|
||||||
for key in keys(authentik_application.service_applications) : key => {
|
for key in keys(authentik_application.service_applications) : key => {
|
||||||
"base_url" = "${var.authentik_url}/application/o/${authentik_application.service_applications[key].slug}"
|
base_url = "${var.authentik_url}/application/o/${authentik_application.service_applications[key].slug}"
|
||||||
"client_id" = authentik_provider_oauth2.service_providers[key].client_id
|
client_id = authentik_provider_oauth2.service_providers[key].client_id
|
||||||
"client_secret" = authentik_provider_oauth2.service_providers[key].client_secret
|
client_secret = authentik_provider_oauth2.service_providers[key].client_secret
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
sensitive = true
|
sensitive = true
|
||||||
|
@ -12,19 +12,28 @@ output "authentik_data" {
|
||||||
output "postgresql_data" {
|
output "postgresql_data" {
|
||||||
value = {
|
value = {
|
||||||
for key in keys(postgresql_database.service_databases) : key => {
|
for key in keys(postgresql_database.service_databases) : key => {
|
||||||
"user" = postgresql_role.service_roles[key].name
|
user = postgresql_role.service_roles[key].name
|
||||||
"pass" = postgresql_role.service_roles[key].password
|
pass = postgresql_role.service_roles[key].password
|
||||||
"database" = postgresql_database.service_databases[key].name
|
database = postgresql_database.service_databases[key].name
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
sensitive = true
|
sensitive = true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
output "postgresql_metrics_collector" {
|
||||||
|
value = {
|
||||||
|
user = postgresql_role.metrics_collector_role.name
|
||||||
|
pass = postgresql_role.metrics_collector_role.password
|
||||||
|
database = postgresql_database.metrics_collector_database.name
|
||||||
|
}
|
||||||
|
sensitive = true
|
||||||
|
}
|
||||||
|
|
||||||
output "mailcow_data" {
|
output "mailcow_data" {
|
||||||
value = {
|
value = {
|
||||||
for key in keys(mailcow_mailbox.services) : key => {
|
for key in keys(mailcow_mailbox.services) : key => {
|
||||||
"address" = mailcow_mailbox.services[key].address
|
address = mailcow_mailbox.services[key].address
|
||||||
"password" = mailcow_mailbox.services[key].password
|
password = mailcow_mailbox.services[key].password
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
sensitive = true
|
sensitive = true
|
||||||
|
|
|
@ -16,3 +16,21 @@ resource "postgresql_database" "service_databases" {
|
||||||
name = each.key
|
name = each.key
|
||||||
owner = postgresql_role.service_roles[each.key].name
|
owner = postgresql_role.service_roles[each.key].name
|
||||||
}
|
}
|
||||||
|
|
||||||
|
resource "random_password" "postgresql_metrics_collector_password" {
|
||||||
|
length = 32
|
||||||
|
special = false
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "postgresql_role" "metrics_collector_role" {
|
||||||
|
name = "metrics_collector"
|
||||||
|
login = true
|
||||||
|
password = random_password.postgresql_metrics_collector_password.result
|
||||||
|
search_path = ["postgres_exporter", "pg_catalog"]
|
||||||
|
roles = ["pg_monitor", "pg_read_all_stats"]
|
||||||
|
}
|
||||||
|
|
||||||
|
resource "postgresql_database" "metrics_collector_database" {
|
||||||
|
name = "metrics_collector"
|
||||||
|
owner = postgresql_role.metrics_collector_role.name
|
||||||
|
}
|
||||||
|
|
|
@ -27,6 +27,11 @@ output "postgresql_data" {
|
||||||
sensitive = true
|
sensitive = true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
output "postgresql_metrics_collector" {
|
||||||
|
value = module.services.postgresql_metrics_collector
|
||||||
|
sensitive = true
|
||||||
|
}
|
||||||
|
|
||||||
output "postgresql" {
|
output "postgresql" {
|
||||||
value = {
|
value = {
|
||||||
"host" = var.postgresql_host
|
"host" = var.postgresql_host
|
||||||
|
|
44
playbooks/filter_plugins/alloy.py
Normal file
44
playbooks/filter_plugins/alloy.py
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
def transfer_optional_param(source, target, name, target_name=None):
    """Copy an optional key from *source* to *target* if it is present.

    The key is looked up as ``name`` in *source* and stored under
    ``target_name`` (falling back to ``name``) in *target*.

    Fix: test for presence with ``is not None`` instead of truthiness so
    that explicitly-set falsy values (``0``, ``False``, ``""``) are still
    transferred; the original truthy walrus check silently dropped them.
    """
    if (param := source.get(name)) is not None:
        target[target_name or name] = param
|
||||||
|
|
||||||
|
class FilterModule(object):
    """Ansible filter plugin converting service definitions into Alloy
    Prometheus scrape jobs."""

    def filters(self):
        """Return the filter-name → callable mapping Ansible expects."""
        return {
            'services_to_alloy': self.services_to_alloy,
        }

    def services_to_alloy(self, services):
        """Build a list of scrape-job dicts from a mapping of services.

        A service produces one job only when it has a truthy ``host`` and
        a non-empty ``metrics`` list.  Each job has:

        - ``name``: the service's key in *services*
        - ``targets``: one entry per ``metrics`` item, with ``address``
          (falling back to the service's first DNS domain), ``path``,
          and ``instance`` (defaulting to the service name, overridable
          per target, as is ``job``)
        - ``scrape_interval``: ``"60s"`` unless a target supplies
          ``interval``
        """
        result = []

        for name, service in services.items():
            # Services without a host are not deployed anywhere; skip.
            if not service.get("host"):
                continue

            # No metrics endpoints declared -> no scrape job.
            targets = service.get("metrics")
            if not targets:
                continue

            job = {
                "name": name,
                "targets": [],
                "scrape_interval": "60s",
            }

            for target in targets:
                # Fall back to the service's primary DNS domain when the
                # metrics target does not specify its own address.
                address = target.get("address") or service["dns"][0]['domain']

                # NOTE(review): a per-target "interval" overrides the
                # job-level scrape interval; with multiple targets the
                # last one wins for the whole job — confirm intended.
                transfer_optional_param(target, job, "interval", "scrape_interval")

                new_target = {
                    "address": address,
                    "path": target["path"],
                    "instance": name
                }

                transfer_optional_param(target, new_target, "instance")
                transfer_optional_param(target, new_target, "job")

                job["targets"].append(new_target)

            result.append(job)

        return result
|
|
@ -3,23 +3,66 @@ logging {
|
||||||
format = "logfmt"
|
format = "logfmt"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
prometheus.remote_write "mimir" {
|
||||||
|
endpoint {
|
||||||
|
url = "https://{{ lgtm_stack_mimir_domain }}/api/v1/push"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
prometheus.exporter.self "alloy" {}
|
prometheus.exporter.self "alloy" {}
|
||||||
prometheus.scrape "alloy" {
|
prometheus.scrape "alloy" {
|
||||||
targets = prometheus.exporter.self.alloy.targets
|
targets = prometheus.exporter.self.alloy.targets
|
||||||
forward_to = [prometheus.remote_write.mimir.receiver]
|
forward_to = [prometheus.remote_write.mimir.receiver]
|
||||||
}
|
}
|
||||||
|
|
||||||
prometheus.scrape "node_exporter" {
|
prometheus.exporter.postgres "default" {
|
||||||
targets = [
|
data_source_names = ["postgresql://{{ svc.postgresql_collector.user }}:{{ svc.postgresql_collector.pass }}@{{ svc.postgresql_collector.host }}:{{ svc.postgresql_collector.port }}/{{ svc.postgresql_collector.database }}?sslmode=verify-full"]
|
||||||
{% for host_data in opentofu.hosts.values() %}
|
|
||||||
{"__address__" = "{{ host_data.fqdn_vpn }}:9100", "job" = "node_exporter"},
|
autodiscovery {
|
||||||
{% endfor %}
|
enabled = true
|
||||||
]
|
}
|
||||||
forward_to = [prometheus.remote_write.mimir.receiver]
|
}
|
||||||
|
prometheus.scrape "postgres" {
|
||||||
|
targets = prometheus.exporter.postgres.default.targets
|
||||||
|
forward_to = [prometheus.remote_write.mimir.receiver]
|
||||||
}
|
}
|
||||||
|
|
||||||
prometheus.remote_write "mimir" {
|
prometheus.scrape "node_exporter" {
|
||||||
endpoint {
|
targets = [
|
||||||
url = "https://{{ lgtm_stack_mimir_domain }}/api/v1/push"
|
{% for host_data in opentofu.hosts.values() %}
|
||||||
}
|
{"__address__" = "{{ host_data.fqdn_vpn }}:9100", "instance" = "{{ host_data.hostname }}"},
|
||||||
|
{% endfor %}
|
||||||
|
]
|
||||||
|
forward_to = [prometheus.remote_write.mimir.receiver]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
prometheus.scrape "caddy" {
|
||||||
|
targets = [
|
||||||
|
{% for host_data in opentofu.hosts.values() %}
|
||||||
|
{"__address__" = "{{ host_data.fqdn_vpn }}:2019", "instance" = "{{ host_data.hostname }}"},
|
||||||
|
{% endfor %}
|
||||||
|
]
|
||||||
|
forward_to = [prometheus.remote_write.mimir.receiver]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
{% for job in lgtm_stack_alloy_jobs %}
|
||||||
|
|
||||||
|
prometheus.scrape "{{ job.name }}" {
|
||||||
|
targets = [
|
||||||
|
{% for target in job.targets %}
|
||||||
|
{
|
||||||
|
"__address__" = "{{ target.address }}",
|
||||||
|
"__metrics_path__" = "{{ target.path }}",
|
||||||
|
"__scheme__" = "https",
|
||||||
|
{% if 'job' in target %}"job" = "{{ target.job }}",{% endif %}
|
||||||
|
{% if 'instance' in target %}"instance" = "{{ target.instance }}",{% endif %}
|
||||||
|
},
|
||||||
|
{% endfor %}
|
||||||
|
]
|
||||||
|
|
||||||
|
scrape_interval = "{{ job.scrape_interval }}"
|
||||||
|
forward_to = [prometheus.remote_write.mimir.receiver]
|
||||||
|
}
|
||||||
|
|
||||||
|
{% endfor %}
|
|
@ -3,6 +3,9 @@ lgtm_stack_domain: "{{ all_services | service_get_domain(role_name) }}"
|
||||||
lgtm_stack_mimir_domain: mimir.serguzim.me
|
lgtm_stack_mimir_domain: mimir.serguzim.me
|
||||||
lgtm_stack_alloy_domain: alloy.serguzim.me
|
lgtm_stack_alloy_domain: alloy.serguzim.me
|
||||||
|
|
||||||
|
lgtm_stack_alloy_jobs: "{{ all_services | services_to_alloy() }}"
|
||||||
|
|
||||||
|
|
||||||
lgtm_stack_svc:
|
lgtm_stack_svc:
|
||||||
domain: "{{ lgtm_stack_domain }}"
|
domain: "{{ lgtm_stack_domain }}"
|
||||||
port: 3000
|
port: 3000
|
||||||
|
@ -15,6 +18,12 @@ lgtm_stack_svc:
|
||||||
docker_host: lgtm_stack_mimir
|
docker_host: lgtm_stack_mimir
|
||||||
port: 9009
|
port: 9009
|
||||||
caddy_extra: import vpn_only
|
caddy_extra: import vpn_only
|
||||||
|
postgresql_collector:
|
||||||
|
host: "{{ postgres.host }}"
|
||||||
|
port: "{{ postgres.port }}"
|
||||||
|
user: "{{ opentofu.postgresql_metrics_collector.user }}"
|
||||||
|
pass: "{{ opentofu.postgresql_metrics_collector.pass }}"
|
||||||
|
database: "{{ opentofu.postgresql_metrics_collector.database }}"
|
||||||
|
|
||||||
lgtm_stack_env:
|
lgtm_stack_env:
|
||||||
|
|
||||||
|
|
|
@ -196,6 +196,10 @@ services = {
|
||||||
name = "forgejo_data"
|
name = "forgejo_data"
|
||||||
type = "docker"
|
type = "docker"
|
||||||
}]
|
}]
|
||||||
|
# TODO: add auth stuff to alloy
|
||||||
|
#metrics = [{
|
||||||
|
# path = "/metrics"
|
||||||
|
#}]
|
||||||
monitoring = {
|
monitoring = {
|
||||||
url = "/api/v1/version"
|
url = "/api/v1/version"
|
||||||
group = "3-services"
|
group = "3-services"
|
||||||
|
@ -300,6 +304,9 @@ services = {
|
||||||
name = "influxdb_data"
|
name = "influxdb_data"
|
||||||
type = "docker"
|
type = "docker"
|
||||||
}]
|
}]
|
||||||
|
metrics = [{
|
||||||
|
path = "/metrics"
|
||||||
|
}]
|
||||||
monitoring = {
|
monitoring = {
|
||||||
url = "/health"
|
url = "/health"
|
||||||
group = "3-services"
|
group = "3-services"
|
||||||
|
@ -501,6 +508,9 @@ services = {
|
||||||
name = "ntfy_data"
|
name = "ntfy_data"
|
||||||
type = "docker"
|
type = "docker"
|
||||||
}]
|
}]
|
||||||
|
metrics = [{
|
||||||
|
path = "/metrics"
|
||||||
|
}]
|
||||||
monitoring = {
|
monitoring = {
|
||||||
url = "/v1/health"
|
url = "/v1/health"
|
||||||
group = "3-services"
|
group = "3-services"
|
||||||
|
@ -607,6 +617,9 @@ services = {
|
||||||
name = "synapse_media_store"
|
name = "synapse_media_store"
|
||||||
type = "docker"
|
type = "docker"
|
||||||
}]
|
}]
|
||||||
|
metrics = [{
|
||||||
|
path = "/_synapse/metrics"
|
||||||
|
}]
|
||||||
monitoring = {
|
monitoring = {
|
||||||
url = "/_matrix/client/versions"
|
url = "/_matrix/client/versions"
|
||||||
group = "3-services"
|
group = "3-services"
|
||||||
|
@ -732,6 +745,9 @@ services = {
|
||||||
name = "vikunja_data"
|
name = "vikunja_data"
|
||||||
type = "docker"
|
type = "docker"
|
||||||
}]
|
}]
|
||||||
|
metrics = [{
|
||||||
|
path = "/api/v1/metrics"
|
||||||
|
}]
|
||||||
monitoring = {
|
monitoring = {
|
||||||
url = "/api/v1/info"
|
url = "/api/v1/info"
|
||||||
group = "3-services"
|
group = "3-services"
|
||||||
|
@ -792,6 +808,10 @@ services = {
|
||||||
alias = "woodpecker"
|
alias = "woodpecker"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
# TODO: add auth stuff to alloy
|
||||||
|
#metrics = [{
|
||||||
|
# path = "/metrics"
|
||||||
|
#}]
|
||||||
monitoring = {
|
monitoring = {
|
||||||
url = "/healthz"
|
url = "/healthz"
|
||||||
group = "3-services"
|
group = "3-services"
|
||||||
|
|
|
@ -139,6 +139,13 @@ variable "services" {
|
||||||
interval = optional(string)
|
interval = optional(string)
|
||||||
conditions = optional(list(string))
|
conditions = optional(list(string))
|
||||||
}))
|
}))
|
||||||
|
metrics = optional(list(object({
|
||||||
|
path = string
|
||||||
|
address = optional(string)
|
||||||
|
instance = optional(string)
|
||||||
|
job = optional(string)
|
||||||
|
interval = optional(string)
|
||||||
|
})))
|
||||||
ports = optional(list(object({
|
ports = optional(list(object({
|
||||||
description = string
|
description = string
|
||||||
port = string
|
port = string
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue