---

# stacks/monitoring/alertmanager/alertmanager.yml
# Three routes: critical -> slack + email, warning -> email only,
# Blackbox* -> slack #http-health.
# Tokens come from /run/secrets/ via docker compose secrets.

# Defaults inherited by every receiver unless overridden per-receiver.
global:
  # Time after which an alert with no EndsAt is declared resolved if it
  # stops being re-sent by Prometheus.
  resolve_timeout: 5m
  # Outbound SMTP relay (submission port 587) used by all email_configs below.
  smtp_smarthost: smtp.home.arpa:587
  smtp_from: alerts@home.arpa
  # Refuse to send mail unless the relay supports STARTTLS.
  smtp_require_tls: true

# Custom notification templates; the receivers below reference the named
# definitions "email.default.html", "slack.title" and "slack.text".
templates:
  - /etc/alertmanager/templates/*.tmpl

route:
  # Fallback receiver for any alert that matches no child route.
  receiver: default-email
  # Alerts sharing these label values are batched into one notification.
  group_by: [alertname, instance]
  group_wait: 30s
  group_interval: 5m
  repeat_interval: 4h
  # Child routes are evaluated top to bottom; matching stops at the first
  # hit unless that route sets `continue: true`.
  routes:
    # Critical alerts: Slack + pager email, faster grouping, hourly re-page.
    # `continue: true` lets the alert also fall through to later routes.
    - matchers:
        - severity = critical
      receiver: critical-pager
      group_wait: 10s
      repeat_interval: 1h
      continue: true

    # Warnings: email only, re-notified twice a day.
    # NOTE(review): this route has no `continue`, so a "Blackbox.*" alert
    # that carries severity=warning stops here and never reaches the
    # http-channel route below — confirm that is the intended behavior.
    - matchers:
        - severity = warning
      receiver: default-email
      repeat_interval: 12h

    # Blackbox probe alerts (not already consumed above) go to the
    # #http-health Slack channel.
    - matchers:
        - alertname =~ "Blackbox.*"
      receiver: http-channel
      repeat_interval: 6h

inhibit_rules:
  # While a critical alert is firing, mute the warning-level alert with the
  # same alertname + instance, so one incident does not notify twice.
  - source_matchers:
      - severity = critical
    target_matchers:
      - severity = warning
    equal: [alertname, instance]

  # A down host implies its containers/endpoints will also alert; mute those
  # secondary alerts for the same instance while HostDown is firing.
  - source_matchers:
      - alertname = HostDown
    target_matchers:
      - alertname =~ "ContainerRestartLoop|EndpointDown"
    equal: [instance]

receivers:
  # Catch-all mailbox used by the root route and the warning route.
  - name: default-email
    email_configs:
      - to: ops@home.arpa
        send_resolved: true
        # HTML body rendered from the custom template loaded via `templates:`.
        html: '{{ template "email.default.html" . }}'

  # Critical path: Slack #alerts plus a dedicated pager mailbox.
  - name: critical-pager
    slack_configs:
      # Webhook URL is read from a docker compose secret, never stored here.
      - api_url_file: /run/secrets/slack_webhook
        channel: "#alerts"
        send_resolved: true
        title: '{{ template "slack.title" . }}'
        text: '{{ template "slack.text" . }}'
    email_configs:
      - to: page@home.arpa
        send_resolved: true

  # Slack-only channel for blackbox/HTTP health alerts.
  - name: http-channel
    slack_configs:
      - api_url_file: /run/secrets/slack_webhook
        channel: "#http-health"
        send_resolved: true