Add prometheus metrics to alloy
parent 616788c5ea
commit 5ad3e9bfe2
8 changed files with 174 additions and 19 deletions
playbooks/filter_plugins/alloy.py (new file)

@@ -0,0 +1,44 @@
def transfer_optional_param(source, target, name, target_name=None):
    if param := source.get(name):
        target[target_name or name] = param

class FilterModule(object):
    def filters(self):
        return {
            'services_to_alloy': self.services_to_alloy,
        }

    def services_to_alloy(self, services):
        result = []

        for name, service in services.items():
            if not bool(service.get("host")):
                continue

            if targets := service.get("metrics") or []:
                job = {
                    "name": name,
                    "targets": [],
                    "scrape_interval": "60s",
                }

                for target in targets:

                    address = target.get("address") or service["dns"][0]['domain']

                    transfer_optional_param(target, job, "interval", "scrape_interval")

                    new_target = {
                        "address": address,
                        "path": target["path"],
                        "instance": name
                    }

                    transfer_optional_param(target, new_target, "instance")
                    transfer_optional_param(target, new_target, "job")

                    job["targets"].append(new_target)

                result.append(job)

        return result
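A minimal usage sketch of the services_to_alloy filter above (not part of the commit). The service names, domain, and metrics entries are hypothetical, and the example assumes the file is importable, e.g. as `from alloy import FilterModule`:

# Hypothetical input shaped like the all_services dictionary the playbooks
# pass to the filter (field names taken from the filter code above).
services = {
    "forgejo": {
        "host": "node01",
        "dns": [{"domain": "git.example.org"}],
        "metrics": [
            {"path": "/metrics", "interval": "30s"},
        ],
    },
    "batch-job": {
        # no "host" key: skipped by services_to_alloy
        "dns": [{"domain": "batch.example.org"}],
    },
}

jobs = FilterModule().filters()["services_to_alloy"](services)
# jobs == [
#     {
#         "name": "forgejo",
#         "targets": [
#             {"address": "git.example.org", "path": "/metrics", "instance": "forgejo"},
#         ],
#         "scrape_interval": "30s",
#     },
# ]
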
@@ -3,23 +3,66 @@ logging {
   format = "logfmt"
 }

-prometheus.remote_write "mimir" {
-  endpoint {
-    url = "https://{{ lgtm_stack_mimir_domain }}/api/v1/push"
-  }
-}
-
 prometheus.exporter.self "alloy" {}
 prometheus.scrape "alloy" {
   targets = prometheus.exporter.self.alloy.targets
   forward_to = [prometheus.remote_write.mimir.receiver]
 }

-prometheus.scrape "node_exporter" {
-  targets = [
-    {% for host_data in opentofu.hosts.values() %}
-    {"__address__" = "{{ host_data.fqdn_vpn }}:9100", "job" = "node_exporter"},
-    {% endfor %}
-  ]
-  forward_to = [prometheus.remote_write.mimir.receiver]
+prometheus.exporter.postgres "default" {
+  data_source_names = ["postgresql://{{ svc.postgresql_collector.user }}:{{ svc.postgresql_collector.pass }}@{{ svc.postgresql_collector.host }}:{{ svc.postgresql_collector.port }}/{{ svc.postgresql_collector.database }}?sslmode=verify-full"]
+
+  autodiscovery {
+    enabled = true
+  }
+}
+prometheus.scrape "postgres" {
+  targets = prometheus.exporter.postgres.default.targets
+  forward_to = [prometheus.remote_write.mimir.receiver]
+}
+
+prometheus.remote_write "mimir" {
+  endpoint {
+    url = "https://{{ lgtm_stack_mimir_domain }}/api/v1/push"
+  }
+}
+
+prometheus.scrape "node_exporter" {
+  targets = [
+    {% for host_data in opentofu.hosts.values() %}
+    {"__address__" = "{{ host_data.fqdn_vpn }}:9100", "instance" = "{{ host_data.hostname }}"},
+    {% endfor %}
+  ]
+  forward_to = [prometheus.remote_write.mimir.receiver]
 }
+
+prometheus.scrape "caddy" {
+  targets = [
+    {% for host_data in opentofu.hosts.values() %}
+    {"__address__" = "{{ host_data.fqdn_vpn }}:2019", "instance" = "{{ host_data.hostname }}"},
+    {% endfor %}
+  ]
+  forward_to = [prometheus.remote_write.mimir.receiver]
+}
+
+
+{% for job in lgtm_stack_alloy_jobs %}
+
+prometheus.scrape "{{ job.name }}" {
+  targets = [
+    {% for target in job.targets %}
+    {
+      "__address__" = "{{ target.address }}",
+      "__metrics_path__" = "{{ target.path }}",
+      "__scheme__" = "https",
+      {% if 'job' in target %}"job" = "{{ target.job }}",{% endif %}
+      {% if 'instance' in target %}"instance" = "{{ target.instance }}",{% endif %}
+    },
+    {% endfor %}
+  ]
+
+  scrape_interval = "{{ job.scrape_interval }}"
+  forward_to = [prometheus.remote_write.mimir.receiver]
+}
+
+{% endfor %}
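For reference, a small sketch (not part of the commit) that renders the per-job loop from the template above with the example job produced by services_to_alloy earlier. It requires the jinja2 package; the whitespace-control flags are chosen here for readable output and are not claimed to mirror Ansible's exact templating settings:

from jinja2 import Template

# The per-job scrape block from the template above.
snippet = """
{% for job in lgtm_stack_alloy_jobs %}
prometheus.scrape "{{ job.name }}" {
  targets = [
    {% for target in job.targets %}
    {
      "__address__" = "{{ target.address }}",
      "__metrics_path__" = "{{ target.path }}",
      "__scheme__" = "https",
      {% if 'job' in target %}"job" = "{{ target.job }}",{% endif %}
      {% if 'instance' in target %}"instance" = "{{ target.instance }}",{% endif %}
    },
    {% endfor %}
  ]

  scrape_interval = "{{ job.scrape_interval }}"
  forward_to = [prometheus.remote_write.mimir.receiver]
}
{% endfor %}
"""

# Example job shaped like the output of services_to_alloy (hypothetical values).
example_jobs = [{
    "name": "forgejo",
    "scrape_interval": "30s",
    "targets": [
        {"address": "git.example.org", "path": "/metrics", "instance": "forgejo"},
    ],
}]

# Prints a prometheus.scrape "forgejo" block with one target carrying
# __address__, __metrics_path__, __scheme__ and instance labels.
print(Template(snippet, trim_blocks=True, lstrip_blocks=True).render(
    lgtm_stack_alloy_jobs=example_jobs,
))
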
@@ -3,6 +3,9 @@ lgtm_stack_domain: "{{ all_services | service_get_domain(role_name) }}"
 lgtm_stack_mimir_domain: mimir.serguzim.me
 lgtm_stack_alloy_domain: alloy.serguzim.me

+lgtm_stack_alloy_jobs: "{{ all_services | services_to_alloy() }}"
+
+
 lgtm_stack_svc:
   domain: "{{ lgtm_stack_domain }}"
   port: 3000

@@ -15,6 +18,12 @@ lgtm_stack_svc:
     docker_host: lgtm_stack_mimir
     port: 9009
     caddy_extra: import vpn_only
+  postgresql_collector:
+    host: "{{ postgres.host }}"
+    port: "{{ postgres.port }}"
+    user: "{{ opentofu.postgresql_metrics_collector.user }}"
+    pass: "{{ opentofu.postgresql_metrics_collector.pass }}"
+    database: "{{ opentofu.postgresql_metrics_collector.database }}"

 lgtm_stack_env: