Migrate Prometheus roles to collection

commit 439f5a1c48

38 changed files with 1187 additions and 0 deletions
README.md (Normal file, 5 lines)
@@ -0,0 +1,5 @@
# Ansible Collection - s3lph.prometheus

Documentation for the collection.

WIP
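A minimal usage sketch for the collection (not part of this commit; the playbook layout and the "monitored" group are illustrative, the role names come from the roles/ directories added below):

    ansible-galaxy collection install s3lph.prometheus

    # playbook.yml
    - hosts: monitored
      roles:
        - s3lph.prometheus.prometheus_node

    - hosts: alertmanagers
      roles:
        - s3lph.prometheus.alertmanager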
galaxy.yml (Normal file, 61 lines)
@@ -0,0 +1,61 @@
# The namespace of the collection. This can be a company/brand/organization or product namespace under which all
# content lives. May only contain alphanumeric lowercase characters and underscores. Namespaces cannot start with
# underscores or numbers and cannot contain consecutive underscores
namespace: s3lph

# The name of the collection. Has the same character restrictions as 'namespace'
name: prometheus

# The version of the collection. Must be compatible with semantic versioning
version: 0.9.0

# The path to the Markdown (.md) readme file. This path is relative to the root of the collection
readme: README.md

# A list of the collection's content authors. Can be just the name or in the format 'Full Name <email> (url)
# @nicks:irc/im.site#channel'
authors:
- s3lph <account-gitlab-ideynizv@kernelpanic.lol>


### OPTIONAL but strongly recommended
# A short summary description of the collection
description: A full monitoring stack with node exporters, blackbox exporter, prometheus and alertmanager

# Either a single license or a list of licenses for content inside of a collection. Ansible Galaxy currently only
# accepts L(SPDX,https://spdx.org/licenses/) licenses. This key is mutually exclusive with 'license_file'
license:
- MIT

# A list of tags you want to associate with the collection for indexing/searching. A tag name has the same character
# requirements as 'namespace' and 'name'
tags:
- prometheus
- alertmanager
- node-exporter
- monitoring

# Collections that this collection requires to be installed for it to be usable. The key of the dict is the
# collection label 'namespace.name'. The value is a version range
# L(specifiers,https://python-semanticversion.readthedocs.io/en/latest/#requirement-specification). Multiple version
# range specifiers can be set and are separated by ','
dependencies: {}

# The URL of the originating SCM repository
repository: https://gitlab.com/s3lph/ansible-collection-prometheus

# The URL to any online docs
documentation: https://gitlab.com/s3lph/ansible-collection-prometheus

# The URL to the homepage of the collection/project
homepage: https://gitlab.com/s3lph/ansible-collection-prometheus

# The URL to the collection issue tracker
issues: https://gitlab.com/s3lph/ansible-collection-prometheus/-/issues

# A list of file glob-like patterns used to filter any files or directories that should not be included in the build
# artifact. A pattern is matched from the relative path of the file or directory of the collection directory. This
# uses 'fnmatch' to match the files or directories. Some directories and files like 'galaxy.yml', '*.pyc', '*.retry',
# and '.git' are always filtered
build_ignore: []
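To illustrate the dependencies format described in the comment above, a hypothetical populated entry (this collection declares none):

    dependencies:
      ansible.posix: '>=1.0.0'
      community.general: '>=4.0.0,<6.0.0'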
roles/alertmanager/defaults/main.yml (Normal file, 11 lines)
@@ -0,0 +1,11 @@
---

alertmanager_port: 9093
alertmanager_arguments: []

notification_channels: {}
notification_templates: []
notification_receivers: {}
inhibit_rules: []

routes: {}
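These defaults are consumed by templates/alertmanager.yml.j2 further down. A sketch of overriding them, using only keys that template actually reads; all values are placeholders and vault_smtp_password is a hypothetical vault variable:

    notification_channels:
      smtp:
        from: alertmanager@example.org
        host: mail.example.org:587
        method: login
        password: "{{ vault_smtp_password }}"

    notification_receivers:
      mail-admins:
        email_configs:
          - to: admins@example.org

    routes:
      receiver: mail-admins
      group_by:
        - alertname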
roles/alertmanager/handlers/main.yml (Normal file, 11 lines)
@@ -0,0 +1,11 @@
---

- name: restart alertmanager
  service:
    name: prometheus-alertmanager.service
    state: restarted

- name: reload alertmanager
  service:
    name: prometheus-alertmanager.service
    state: reloaded
roles/alertmanager/tasks/config.yml (Normal file, 30 lines)
@@ -0,0 +1,30 @@
---

- name: render alertmanager runtime arguments
  lineinfile:
    path: /etc/default/prometheus-alertmanager
    regexp: "^ARGS=.*$"
    line: >-
      ARGS="{{ alertmanager_arguments | join(' ') }}"
    insertbefore: BOF
  notify: restart alertmanager

- name: render alertmanager config template
  template:
    src: alertmanager.yml.j2
    dest: /etc/prometheus/alertmanager.yml
    owner: root
    group: prometheus
    mode: 0640
    validate: /usr/bin/amtool check-config %s
  notify: reload alertmanager

- name: render alertmanager notification templates
  copy:
    content: "{{ item.content }}"
    dest: "{{ item.path }}"
    owner: root
    group: prometheus
    mode: 0640
  loop: "{{ notification_templates }}"
  notify: reload alertmanager
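The last task expects notification_templates items with path and content; a minimal sketch (path and template body are placeholders):

    notification_templates:
      - path: /etc/prometheus/alertmanager-templates/custom.tmpl
        content: |
          {{ define "custom.title" }}[{{ .Status }}] {{ .CommonLabels.alertname }}{{ end }}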
roles/alertmanager/tasks/install.yml (Normal file, 13 lines)
@@ -0,0 +1,13 @@
---

- name: install alertmanager
  apt:
    name: prometheus-alertmanager
    state: present
    update_cache: yes

- name: start and enable alertmanager
  systemd:
    name: prometheus-alertmanager.service
    state: started
    enabled: yes
roles/alertmanager/tasks/main.yml (Normal file, 13 lines)
@@ -0,0 +1,13 @@
---

- name: install alertmanager
  import_tasks: install.yml
  tags:
    - "role::alertmanager"
    - "role::alertmanager:install"

- name: configure alertmanager
  import_tasks: config.yml
  tags:
    - "role::alertmanager"
    - "role::alertmanager:config"
roles/alertmanager/templates/alertmanager.yml.j2 (Normal file, 93 lines)
@@ -0,0 +1,93 @@
---
{{ ansible_managed | comment }}

global:
{% if 'smtp' in notification_channels %}
  # The default SMTP From header field.
  smtp_from: {{ notification_channels.smtp.from }}
  # The default SMTP smarthost used for sending emails, including port number.
  # Port number usually is 25, or 587 for SMTP over TLS (sometimes referred to as STARTTLS).
  # Example: smtp.example.org:587
  smtp_smarthost: {{ notification_channels.smtp.host }}
  # The default hostname to identify to the SMTP server.
  smtp_hello: {{ notification_channels.smtp.hello | default(notification_channel_smtp_hello) }}
  # SMTP Auth using CRAM-MD5, LOGIN and PLAIN. If empty, Alertmanager doesn't authenticate to the SMTP server.
  smtp_auth_username: {{ notification_channels.smtp.username | default(notification_channels.smtp.from) }}
{% if notification_channels.smtp.method == 'login' %}
  # SMTP Auth using LOGIN and PLAIN.
  smtp_auth_password: {{ notification_channels.smtp.password }}
{% elif notification_channels.smtp.method == 'plain' %}
  # SMTP Auth using PLAIN.
  smtp_auth_identity: {{ notification_channels.smtp.password }}
{% elif notification_channels.smtp.method == 'cram-md5' %}
  # SMTP Auth using CRAM-MD5.
  smtp_auth_secret: {{ notification_channels.smtp.password }}
{% endif %}
  # The default SMTP TLS requirement.
  smtp_require_tls: {{ (not notification_channels.smtp.tls_not_required) | default(True) }}
{% endif %}

{% if 'slack' in notification_channels %}
  # The API URL to use for Slack notifications.
  slack_api_url: {{ notification_channels.slack.url }}
{% endif %}

{% if 'victorops' in notification_channels %}
  victorops_api_url: {{ notification_channels.victorops.url }}
  victorops_api_key: {{ notification_channels.victorops.key }}
{% endif %}

{% if 'pagerduty' in notification_channels %}
  pagerduty_url: {{ notification_channels.pagerduty.url }}
{% endif %}

{% if 'opsgenie' in notification_channels %}
  opsgenie_api_url: {{ notification_channels.opsgenie.url }}
  opsgenie_api_key: {{ notification_channels.opsgenie.key }}
{% endif %}

{% if 'hipchat' in notification_channels %}
  hipchat_api_url: {{ notification_channels.hipchat.url }}
  hipchat_auth_token: {{ notification_channels.hipchat.key }}
{% endif %}

{% if 'wechat' in notification_channels %}
  wechat_api_url: {{ notification_channels.wechat.url }}
  wechat_api_secret: {{ notification_channels.wechat.key }}
  wechat_api_corp_id: {{ notification_channels.wechat.corp }}
{% endif %}

{% if 'http' in notification_channels %}
  http_config: {}
{% endif %}

templates:
{% if notification_templates | length == 0 %}
  []
{% else %}
{% for template in notification_templates %}
  - {{ template.path }}
{% endfor %}
{% endif %}

receivers:
{% if notification_receivers | length == 0 %}
  []
{% else %}
{% for name, config in notification_receivers.items() %}
  - name: {{ name }}
    {{ config | to_nice_yaml(indent=2) | indent(width=4) }}
{% endfor %}
{% endif %}

inhibit_rules:
{% if inhibit_rules | length == 0 %}
  []
{% else %}
{% for rule in inhibit_rules %}
  - {{ rule | to_nice_yaml(indent=2) | indent(width=4) }}
{% endfor %}
{% endif %}

route:
  {{ routes | to_nice_yaml(indent=2) | indent(width=2) }}
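For orientation, the receivers/route sections of this template would render the example vars sketched earlier to roughly this (hand-derived, not output from the commit):

    receivers:
      - name: mail-admins
        email_configs:
        - to: admins@example.org

    route:
      group_by:
      - alertname
      receiver: mail-admins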
roles/prometheus/defaults/main.yml (Normal file, 26 lines)
@@ -0,0 +1,26 @@
---

blackbox_prefer_ipv4: no
prometheus_blackbox_exporter_version: 0.16.0

prometheus_port: 9090
prometheus_alertmanager_port: 9093
prometheus_node_exporter_port: 9100

prometheus_scrape_interval: 1m
prometheus_evaluation_interval: 1m
prometheus_lookback_delta: 5m

blackbox_znc_pass: CertainlyInvalidPassword
blackbox_znc_nick: prometheus
blackbox_znc_fullname: "CCC Basel Prometheus Monitoring"

blackbox_irc_nick: prometheus
blackbox_irc_fullname: "CCC Basel Prometheus Monitoring"
blackbox_irc_pingtext: prometheus

blackbox_ircs_nick: prometheus
blackbox_ircs_fullname: "CCC Basel Prometheus Monitoring"
blackbox_ircs_pingtext: prometheus

blackbox_smtp_name: prometheus
roles/prometheus/handlers/main.yml (Normal file, 11 lines)
@@ -0,0 +1,11 @@
---

- name: restart prometheus
  systemd:
    name: prometheus.service
    state: restarted

- name: restart blackbox exporter
  systemd:
    name: prometheus-blackbox-exporter-local.service
    state: restarted
roles/prometheus/tasks/blackbox.yml (Normal file, 10 lines)
@@ -0,0 +1,10 @@
---

- name: render blackbox exporter configuration
  template:
    src: blackbox.yml
    dest: /etc/prometheus/blackbox.yml
    owner: root
    group: root
    mode: 0644
  notify: restart blackbox exporter
roles/prometheus/tasks/flush.yml (Normal file, 41 lines)
@@ -0,0 +1,41 @@
---

- name: enable prometheus admin api
  lineinfile:
    path: /etc/default/prometheus
    regexp: "^ARGS=.*$"
    line: >-
      ARGS="--query.lookback-delta={{ prometheus_lookback_delta }} --web.enable-admin-api --web.listen-address=127.0.0.1:9090"
    insertbefore: BOF
  notify: restart prometheus

- name: flush handlers
  meta: flush_handlers

- name: wait
  pause:
    seconds: 15

- name: clear prometheus time series database
  uri:
    url: http://127.0.0.1:9090/api/v1/admin/tsdb/delete_series?match[]=%7B__name__%3D~%22.%2B%22%7D
    method: POST
    status_code:
      - 200
      - 204

- name: wait
  pause:
    seconds: 5

- name: disable prometheus admin api
  lineinfile:
    path: /etc/default/prometheus
    regexp: "^ARGS=.*$"
    line: >-
      ARGS="--query.lookback-delta={{ prometheus_lookback_delta }}"
    insertbefore: BOF
  notify: restart prometheus

- name: flush handlers
  meta: flush_handlers
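For reference, the URL-encoded match parameter in the delete_series call above decodes to match[]={__name__=~".+"}, i.e. every time series is deleted.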
roles/prometheus/tasks/install.yml (Normal file, 61 lines)
@@ -0,0 +1,61 @@
---

- name: install prometheus
  apt:
    name: prometheus
    state: present
    update_cache: yes

- name: start and enable prometheus
  systemd:
    name: prometheus.service
    state: started
    enabled: yes

- name: download blackbox exporter
  get_url:
    url: "https://github.com/prometheus/blackbox_exporter/releases/download/v{{ prometheus_blackbox_exporter_version }}/blackbox_exporter-{{ prometheus_blackbox_exporter_version }}.linux-amd64.tar.gz"  # noqa 204
    dest: "/root/blackbox_exporter-{{ prometheus_blackbox_exporter_version }}.tar.gz"

- name: unpack blackbox exporter
  unarchive:
    src: "/root/blackbox_exporter-{{ prometheus_blackbox_exporter_version }}.tar.gz"
    dest: "/root"
    remote_src: yes

- name: install blackbox exporter
  copy:
    src: "/root/blackbox_exporter-{{ prometheus_blackbox_exporter_version }}.linux-amd64/blackbox_exporter"
    dest: /usr/local/bin/prometheus-blackbox-exporter
    remote_src: yes
    owner: root
    group: root
    mode: 0755

- name: install blackbox exporter defaults file
  template:
    src: etc-defaults-blackbox
    dest: /etc/default/prometheus-blackbox-exporter
    owner: root
    group: root
    mode: 0644

- name: install blackbox exporter service
  template:
    src: prometheus-blackbox-exporter-local.service
    dest: /etc/systemd/system/prometheus-blackbox-exporter-local.service
    owner: root
    group: root
    mode: 0644

- name: stop and disable native blackbox exporter
  systemd:
    name: prometheus-blackbox-exporter.service
    state: stopped
    enabled: no

- name: start and enable blackbox exporter
  systemd:
    name: prometheus-blackbox-exporter-local.service
    state: started
    enabled: yes
roles/prometheus/tasks/main.yml (Normal file, 27 lines)
@@ -0,0 +1,27 @@
---

- name: install prometheus and blackbox exporter
  import_tasks: install.yml
  tags:
    - "role::prometheus"
    - "role::prometheus:install"

- name: configure blackbox exporter
  import_tasks: blackbox.yml
  tags:
    - "role::prometheus"
    - "role::prometheus:blackbox"

- name: configure prometheus
  import_tasks: prometheus.yml
  tags:
    - "role::prometheus"
    - "role::prometheus:config"

# Disabled for now due to problems with the "never" tag
#- name: flush prometheus time series database
#  import_tasks: flush.yml
#  tags:
#    - "role::prometheus"
#    - "role::prometheus:flush"
#    - "never"
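Context for the commented-out block: tasks tagged "never" run only when that tag is requested explicitly, so the intended invocation would presumably be along these lines (playbook name hypothetical):

    ansible-playbook playbook.yml --tags 'role::prometheus:flush'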
roles/prometheus/tasks/prometheus.yml (Normal file, 137 lines)
@@ -0,0 +1,137 @@
---

- name: create config fragment directory
  delegate_to: localhost
  run_once: yes
  file:
    path: "{{ playbook_dir }}/.prometheus/{{ item }}.d"
    state: directory
  loop:
    - conf
    - alerts

- name: list current config fragments
  delegate_to: localhost
  find:
    paths:
      - "{{ playbook_dir }}/.prometheus/conf.d/"
      - "{{ playbook_dir }}/.prometheus/alerts.d/"
    recurse: yes
    patterns: "*.yml"
    excludes: "00-base.yml"
  register: prometheus_register_current_fragments

- name: process current config fragments
  set_fact:
    prometheus_register_current_fragments:
      "{{ prometheus_register_current_fragments.files | map(attribute='path') | list }}"

- name: render prometheus base config
  delegate_to: localhost
  run_once: yes
  template:
    src: prometheus-base.yml
    dest: "{{ playbook_dir }}/.prometheus/conf.d/00-base.yml"

- name: render prometheus job configs
  delegate_to: localhost
  run_once: yes
  template:
    src: prometheus-job.yml
    dest: "{{ playbook_dir }}/.prometheus/conf.d/{{ '%02d' | format(counter+1) }}-job-{{ item.name }}.yml"
  vars:
    job: "{{ item }}"
  loop: "{{ hostvars[inventory_hostname] | dict2items | selectattr('key', 'match', '^prometheus_job_.+$') | map(attribute='value') | list }}"
  loop_control:
    index_var: counter
  register: prometheus_register_new_config_fragments

- name: remove newly created files from deletion list
  set_fact:
    prometheus_register_current_fragments:
      "{{ prometheus_register_current_fragments | difference(prometheus_register_new_config_fragments.results | map(attribute='dest') | list) }}"

- name: render prometheus alert base config
  delegate_to: localhost
  run_once: yes
  template:
    src: prometheus-alert-base.yml
    dest: "{{ playbook_dir }}/.prometheus/alerts.d/00-base.yml"

- name: render prometheus alert configs
  delegate_to: localhost
  run_once: yes
  copy:
    content: "{{ item.alerts | to_nice_yaml(indent=2) | indent(2, first=true) }}"
    dest: "{{ playbook_dir }}/.prometheus/alerts.d/{{ '%02d' | format(counter+1) }}-alert-{{ item.name }}.yml"
  loop: "{{ hostvars[inventory_hostname] | dict2items | selectattr('key', 'match', '^prometheus_job_.+$') | map(attribute='value') | list }}"
  loop_control:
    index_var: counter
  register: prometheus_register_new_alert_fragments

- name: remove newly created files from deletion list
  set_fact:
    prometheus_register_current_fragments:
      "{{ prometheus_register_current_fragments | difference(prometheus_register_new_alert_fragments.results | map(attribute='dest') | list) }}"

- name: render host-specific prometheus alert configs
  delegate_to: localhost
  copy:
    content: "{{ hostvars[item].prometheus_host_specific_alerts | to_nice_yaml(indent=2) | indent(2, first=true) }}"
    dest: "{{ playbook_dir }}/.prometheus/alerts.d/{{ '99-host-%s' | format(hostvars[item].inventory_hostname) }}-alerts.yml"
  when: "'prometheus_host_specific_alerts' in hostvars[item]"
  loop: "{{ hostvars.keys() | list }}"
  register: prometheus_register_new_host_specific_alert_fragments

- name: remove newly created files from deletion list
  set_fact:
    prometheus_register_current_fragments:
      "{{ prometheus_register_current_fragments | difference(prometheus_register_new_host_specific_alert_fragments.results | selectattr('dest', 'defined') | map(attribute='dest') | list) }}"  # noqa 204

- name: delete old config fragments
  delegate_to: localhost
  file:
    path: "{{ item }}"
    state: absent
  loop: "{{ prometheus_register_current_fragments }}"

- name: assemble fragment directories
  delegate_to: localhost
  assemble:
    src: "{{ playbook_dir }}/.prometheus/{{ item.local }}.d/"
    dest: "{{ playbook_dir }}/.prometheus/{{ item.remote }}.yml"
    delimiter: "\n\n"
  loop:
    - local: conf
      remote: prometheus
    - local: alerts
      remote: alert_rules

- name: upload config files to host
  copy:
    src: "{{ playbook_dir }}/.prometheus/prometheus.yml"
    dest: "/etc/prometheus/prometheus.yml"
    owner: root
    group: root
    mode: 0644
    validate: /usr/bin/promtool check config %s
  notify: restart prometheus

- name: upload alert config file to host
  copy:
    src: "{{ playbook_dir }}/.prometheus/alert_rules.yml"
    dest: "/etc/prometheus/alert_rules.yml"
    owner: root
    group: root
    mode: 0644
    validate: /usr/bin/promtool check rules %s
  notify: restart prometheus

- name: configure prometheus lookback delta
  lineinfile:
    path: /etc/default/prometheus
    regexp: "^ARGS=.*$"
    line: >-
      ARGS="--query.lookback-delta={{ prometheus_lookback_delta }}"
    insertbefore: BOF
  notify: restart prometheus
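The tasks above stage everything in a local .prometheus/ directory next to the playbook before uploading; roughly (illustrative):

    .prometheus/
      conf.d/
        00-base.yml               # from prometheus-base.yml
        01-job-<name>.yml         # one per prometheus_job_* hostvar
      alerts.d/
        00-base.yml               # from prometheus-alert-base.yml
        01-alert-<name>.yml       # from each job's alerts list
        99-host-<host>-alerts.yml # from prometheus_host_specific_alerts
      prometheus.yml              # assembled from conf.d/
      alert_rules.yml             # assembled from alerts.d/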
roles/prometheus/templates/blackbox.yml (Normal file, 180 lines)
@@ -0,0 +1,180 @@
---
{{ ansible_managed | comment }}

modules:

  http_2xx:
    prober: http
    http:
      valid_status_codes:
        - 200
        - 204  # No Content is ok, often returned by some APIs
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls_config:
        insecure_skip_verify: true

  # A simple ICMP probe, used for network equipment, such as switches
  icmp:
    prober: icmp

  # Check that the address returns a 401 Unauthorized when queried without user credentials
  http_auth_required:
    prober: http
    http:
      valid_status_codes:
        - 401
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls_config:
        insecure_skip_verify: true

  http_modified:
    prober: http
    http:
      valid_status_codes:
        - 200
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls_config:
        insecure_skip_verify: true

  ssh:
    prober: tcp
    timeout: 2s
    tcp:
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      query_response:
        - expect: "^SSH-2.0.*"

  tls:
    prober: tcp
    timeout: 2s
    tcp:
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls: true

  znc:
    prober: tcp
    timeout: 2s
    tcp:
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls: true
      query_response:
        - send: "PASS :{{ blackbox_znc_pass }}\r\n"
        - send: "NICK {{ blackbox_znc_nick }}\r\n"
        - send: "USER {{ blackbox_znc_nick }} * * :{{ blackbox_znc_fullname }}\r\n"
        - expect: "^:.* 464 {{ blackbox_znc_nick }} :.*$"

  irc:
    prober: tcp
    timeout: 2s
    tcp:
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      query_response:
        - send: "NICK {{ blackbox_irc_nick }}\r\n"
        - send: "USER {{ blackbox_irc_nick }} * * :{{ blackbox_irc_fullname }}\r\n"
        - expect: "^:.* {{ blackbox_irc_nick }} :.*$"

  ircs:
    prober: tcp
    timeout: 2s
    tcp:
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls: true
{% if blackbox_ircs_tlsname is defined -%}
      tls_config:
        server_name: "{{ blackbox_ircs_tlsname }}"
{% endif -%}
      query_response:
        - send: "NICK {{ blackbox_ircs_nick }}\r\n"
        - send: "USER {{ blackbox_ircs_nick }} * * :{{ blackbox_ircs_fullname }}\r\n"
        - expect: "^:.* {{ blackbox_ircs_nick }} :.*$"

  smtp:
    prober: tcp
    timeout: 2s
    tcp:
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      query_response:
        - expect: "^220 .*SMTP .*$"
        - send: "HELO {{ blackbox_smtp_name }}"
        - expect: "^250 .*$"
        - send: "QUIT"

  smtps:
    prober: tcp
    timeout: 2s
    tcp:
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls: true
      query_response:
        - expect: "^220 .*SMTP .*$"
        - send: "HELO {{ blackbox_smtp_name }}"
        - expect: "^250 .*$"
        - send: "QUIT"

  imap:
    prober: tcp
    timeout: 2s
    tcp:
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      query_response:
        - expect: "^\\* OK .*$"

  imaps:
    prober: tcp
    timeout: 2s
    tcp:
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls: true
      query_response:
        - expect: "^\\* OK .*$"

  nextcloud_installed:
    prober: http
    http:
      valid_status_codes:
        - 200
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls_config:
        insecure_skip_verify: true
      fail_if_body_not_matches_regexp:
        - "\"installed\":true"

  nextcloud_maintenance:
    prober: http
    http:
      valid_status_codes:
        - 200
{% if blackbox_prefer_ipv4 -%}
      preferred_ip_protocol: ip4
{% endif -%}
      tls_config:
        insecure_skip_verify: true
      fail_if_body_not_matches_regexp:
        - "\"maintenance\":false"
        - "\"needsDbUpgrade\":false"
roles/prometheus/templates/etc-defaults-blackbox (Normal file, 12 lines)
@@ -0,0 +1,12 @@
{{ ansible_managed | comment }}

ARGS='--config.file /etc/prometheus/blackbox.yml'

# Usage of prometheus-blackbox-exporter:
#   --config.file="blackbox.yml"
#                 Blackbox exporter configuration file.
#   --web.listen-address=":9115"
#                 The address to listen on for HTTP requests.
#   --timeout-offset=0.5  Offset to subtract from timeout in seconds.
#   --log.level=info      Only log messages with the given severity or above.
#                         One of: [debug, info, warn, error]
roles/prometheus/templates/prometheus-alert-base.yml (Normal file, 6 lines)
@@ -0,0 +1,6 @@
---
{{ ansible_managed | comment }}

groups:
  - name: 'ccc-basel'
    rules:
roles/prometheus/templates/prometheus-base.yml (Normal file, 25 lines)
@@ -0,0 +1,25 @@
---
{{ ansible_managed | comment }}

global:
  scrape_interval: {{ prometheus_scrape_interval }}
  evaluation_interval: {{ prometheus_evaluation_interval }}
  external_labels:
    monitor: {{ prometheus_monitor_name | default(ansible_hostname) }}

alerting:
  alertmanagers:
    - static_configs:
{% if groups['alertmanagers'] | default([]) | length == 0 -%}
        - targets: []
{%- else -%}
        - targets:
{% for host in groups['alertmanagers'] -%}
            - "{{ hostvars[host].inventory_hostname }}:{{ hostvars[host].prometheus_alertmanager_port | default(prometheus_alertmanager_port) | int }}"
{%- endfor %}
{%- endif %}

rule_files:
  - /etc/prometheus/alert_rules.yml

scrape_configs:
roles/prometheus/templates/prometheus-blackbox-exporter-local.service (Normal file, 15 lines; filename restored from tasks/install.yml above, the diff viewer dropped the header)
@@ -0,0 +1,15 @@
[Unit]
Description=Blackbox exporter for Prometheus
Documentation=https://prometheus.io/docs/introduction/overview/

[Service]
Restart=always
User=prometheus
EnvironmentFile=/etc/default/prometheus-blackbox-exporter
ExecStart=/usr/local/bin/prometheus-blackbox-exporter $ARGS
ExecReload=/bin/kill -HUP $MAINPID
AmbientCapabilities=CAP_NET_RAW
NoNewPrivileges=true

[Install]
WantedBy=multi-user.target
roles/prometheus/templates/prometheus-job.yml (Normal file, 43 lines)
@@ -0,0 +1,43 @@

- job_name: {{ job.name }}
{% if 'scrape_interval' in job -%}
  scrape_interval: {{ job.scrape_interval }}
{% endif -%}
{% if 'blackbox' in job -%}
  metrics_path: /probe
  relabel_configs:
    - source_labels:
        - __address__
      target_label: __param_target
    - source_labels:
        - __param_target
      target_label: instance
    - target_label: __address__
      replacement: localhost:9115
  params:
    module:
{% for module in job.blackbox.modules -%}
      - {{ module }}
{%- endfor %}
{% endif %}

{% if 'scheme' in job -%}
  scheme: {{ job.scheme }}
{% endif -%}

{% if 'basic_auth_user' in job and 'basic_auth_password' in job -%}
  basic_auth:
    username: {{ job.basic_auth_user }}
    password: {{ job.basic_auth_password }}
{% endif -%}

  static_configs:
    - targets:
{% for host in groups['all'] -%}
{% if hostvars[host]['monitor_' + job.name] | default(false) | bool -%}
{%- set fstring = job.defaults | default({}) -%}
{%- set ignored = fstring.update(hostvars[host]) -%}
        - {{ job.target_format | format(**fstring) }}
{% endif -%}
{% endfor -%}

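How a job is declared: tasks/prometheus.yml above picks up every hostvar matching prometheus_job_* and passes it to this template as job. A sketch using only keys the template reads (job names, the port and the format strings are illustrative):

    prometheus_job_node:
      name: node
      scrape_interval: 1m
      target_format: "%(inventory_hostname)s:9100"
      alerts: []    # rendered into alerts.d/ by tasks/prometheus.yml

    prometheus_job_ping:
      name: ping
      blackbox:
        modules:
          - icmp
      target_format: "%(inventory_hostname)s"

    # and on every host that a job should scrape:
    monitor_node: yes
    monitor_ping: yes

target_format is a Python %-format string; it is expanded with the host's hostvars merged over job.defaults, so any hostvar can appear as %(name)s.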
roles/prometheus_node/defaults/main.yml (Normal file, 10 lines)
@@ -0,0 +1,10 @@
---

debian_mirror: ftp.ch.debian.org

prometheus_node_exporter_args: {}

smartmon_exporter_force_off: no
prometheus_hpsa_collector: no

prometheus_textfile_collectors: {}
roles/prometheus_node/handlers/main.yml (Normal file, 18 lines)
@@ -0,0 +1,18 @@
---

- name: ucr commit
  command: /usr/sbin/ucr commit

- name: systemctl daemon-reload
  systemd:
    daemon_reload: yes

- name: restart prometheus-zpool-collector
  systemd:
    name: prometheus-node-exporter-zpool.timer
    state: restarted

- name: restart prometheus-node-exporter
  systemd:
    name: prometheus-node-exporter.service
    state: restarted
roles/prometheus_node/tasks/collectors.yml (Normal file, 42 lines)
@@ -0,0 +1,42 @@
---

- name: install collector script
  template:
    src: "{{ basename }}.j2"
    dest: "/usr/local/bin/{{ basename }}"
    mode: 0750
  vars:
    basename: "{{ item.value.template_base_name | default('prometheus-node-exporter-' + item.key + '.sh') }}"
  loop: "{{ prometheus_textfile_collectors | dict2items }}"

- name: install collector config
  copy:
    content: "{{ item.value.config.content }}"
    dest: "{{ item.value.config.filename }}"
  when: "'config' in item.value"
  loop: "{{ prometheus_textfile_collectors | dict2items }}"

- name: install collector service
  template:
    src: "prometheus-collector.service.j2"
    dest: "/etc/systemd/system/prometheus-node-exporter-{{ item.key }}.service"
  vars:
    basename: "{{ item.value.template_base_name | default('prometheus-node-exporter-' + item.key + '.sh') }}"
  loop: "{{ prometheus_textfile_collectors | dict2items }}"
  notify:
    - systemctl daemon-reload

- name: install collector timer
  template:
    src: "prometheus-collector.timer.j2"
    dest: "/etc/systemd/system/prometheus-node-exporter-{{ item.key }}.timer"
  loop: "{{ prometheus_textfile_collectors | dict2items }}"
  notify:
    - systemctl daemon-reload

- name: start and enable collector timer
  systemd:
    name: "prometheus-node-exporter-{{ item.key }}.timer"
    state: started
    enabled: yes
  loop: "{{ prometheus_textfile_collectors | dict2items }}"
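A sketch of a prometheus_textfile_collectors entry, using the keys these tasks and the two systemd templates near the end of the commit read (the collector name and paths are illustrative; the dict key names the collector):

    prometheus_textfile_collectors:
      zpool:
        interval: 5m                      # consumed by prometheus-collector.timer.j2
        # template_base_name: zpool.sh    # optional; default would be prometheus-node-exporter-zpool.sh
        # config:                         # optional extra config file, installed verbatim
        #   filename: /etc/zpool-collector.conf
        #   content: |
        #     pools=tank

With this entry, the role renders a prometheus-node-exporter-zpool.sh.j2 template from the role into /usr/local/bin/ and wires it to a oneshot service plus timer.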
roles/prometheus_node/tasks/config.yml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
---

- name: render /etc/default/prometheus-node-exporter
  template:
    src: prometheus-node-exporter.default.j2
    dest: /etc/default/prometheus-node-exporter
  vars:
    args: "{{ prometheus_node_exporter_args }}"
  notify: restart prometheus-node-exporter
roles/prometheus_node/tasks/debian10.yml (Normal file, 32 lines)
@@ -0,0 +1,32 @@
---

- name: install prometheus-node-exporter
  apt:
    name: prometheus-node-exporter
    state: present
    update_cache: yes

- name: start prometheus-node-exporter.service
  systemd:
    name: prometheus-node-exporter.service
    state: started
    enabled: yes

- name: start prometheus-node-exporter-apt.timer
  systemd:
    name: prometheus-node-exporter-apt.timer
    state: started
    enabled: yes

- name: install smartmontools
  apt:
    name: smartmontools
    state: present
  when:
    - ansible_virtualization_role == 'host'

- name: start prometheus-node-exporter-smartmon.timer
  systemd:
    name: prometheus-node-exporter-smartmon.timer
    state: "{{ (ansible_virtualization_role == 'host' and not smartmon_exporter_force_off) | ternary('started', 'stopped') }}"
    enabled: "{{ ansible_virtualization_role == 'host' }}"
roles/prometheus_node/tasks/debian11.yml (Normal file, 35 lines)
@@ -0,0 +1,35 @@
---

- name: install prometheus-node-exporter
  apt:
    name: "{{ item }}"
    state: present
    update_cache: yes
  loop:
    - prometheus-node-exporter
    - prometheus-node-exporter-collectors

- name: start prometheus-node-exporter.service
  systemd:
    name: prometheus-node-exporter.service
    state: started
    enabled: yes

- name: start prometheus-node-exporter-apt.timer
  systemd:
    name: prometheus-node-exporter-apt.timer
    state: started
    enabled: yes

- name: install smartmontools
  apt:
    name: smartmontools
    state: present
  when:
    - ansible_virtualization_role == 'host'

- name: start prometheus-node-exporter-smartmon.timer
  systemd:
    name: prometheus-node-exporter-smartmon.timer
    state: "{{ (ansible_virtualization_role == 'host' and not smartmon_exporter_force_off) | ternary('started', 'stopped') }}"
    enabled: "{{ ansible_virtualization_role == 'host' }}"
roles/prometheus_node/tasks/main.yml (Normal file, 39 lines)
@@ -0,0 +1,39 @@
---

- name: install node exporter on debian buster
  import_tasks: debian10.yml
  when:
    - "ansible_lsb.id == 'Debian' or ansible_lsb.id == 'Raspbian'"
    - "ansible_lsb.major_release | int == 10"
  tags:
    - "role::prometheus_node"
    - "role::prometheus_node:debian"

- name: install node exporter on debian bullseye
  import_tasks: debian11.yml
  when:
    - "ansible_lsb.id == 'Debian' or ansible_lsb.id == 'Raspbian'"
    - "ansible_lsb.major_release | int == 11"
  tags:
    - "role::prometheus_node"
    - "role::prometheus_node:debian"

- name: install node exporter on univention corporate server
  import_tasks: ucs.yml
  when:
    - "ansible_lsb.id == 'Univention'"
  tags:
    - "role::prometheus_node"
    - "role::prometheus_node:univention"

- name: install custom textfile collectors
  import_tasks: collectors.yml
  tags:
    - "role::prometheus_node"
    - "role::prometheus_node:collectors"

- name: configure prometheus node exporter
  import_tasks: config.yml
  tags:
    - "role::prometheus_node"
    - "role::prometheus_node:config"
roles/prometheus_node/tasks/ucs.yml (Normal file, 71 lines)
@@ -0,0 +1,71 @@
---

- name: check if package files already exist
  stat:
    path: "/root/{{ item.name }}_{{ item.version }}_amd64.deb"
  loop:
    - name: prometheus-node-exporter
      version: 0.17.0+ds-3+b11
    - name: moreutils
      version: 0.62-1
  register: prometheus_node_register_ucs_deb_stat

- name: download packages
  get_url:
    url: "https://{{ debian_mirror }}/debian/pool/main/{{ pfirst | urlencode() }}/{{ package | urlencode() }}/{{ package | urlencode() }}_{{ version | urlencode() }}_amd64.deb"  # noqa 204
    dest: "/root/{{ package }}_{{ version }}_amd64.deb"
  vars:
    pfirst: "{{ item.item.name[:1] }}"
    package: "{{ item.item.name }}"
    version: "{{ item.item.version }}"
  when: not item.stat.exists
  loop: "{{ prometheus_node_register_ucs_deb_stat.results }}"

- name: install packages
  apt:
    deb: "/root/{{ item.item.name }}_{{ item.item.version }}_amd64.deb"
  loop: "{{ prometheus_node_register_ucs_deb_stat.results }}"

- name: start prometheus-node-exporter.service
  systemd:
    name: prometheus-node-exporter.service
    state: started
    enabled: yes

- name: start prometheus-node-exporter-apt.timer
  systemd:
    name: prometheus-node-exporter-apt.timer
    state: started
    enabled: yes

- name: install smartmontools
  apt:
    name: smartmontools
    state: present
  when:
    - ansible_virtualization_role == 'host'

- name: start prometheus-node-exporter-smartmon.timer
  systemd:
    name: prometheus-node-exporter-smartmon.timer
    state: "{{ (ansible_virtualization_role == 'host' and not smartmon_exporter_force_off) | ternary('started', 'stopped') }}"
    enabled: "{{ ansible_virtualization_role == 'host' }}"

- name: check firewall settings in ucr
  command: "/usr/sbin/ucr get {{ item.name }}"
  changed_when: no
  check_mode: no
  loop:
    - name: security/packetfilter/tcp/9101/all
      value: ACCEPT
    - name: security/packetfilter/tcp/9101/all/en
      value: prometheus-node-exporter
  register: prometheus_node_register_ucs_ucr

- name: set firewall settings in ucr
  command: "/usr/sbin/ucr set {{ item.item.name }}={{ item.item.value }}"
  changed_when: yes
  notify:
    - ucr commit
  when: item.stdout != item.item.value
  loop: "{{ prometheus_node_register_ucs_ucr.results }}"
roles/prometheus_node/templates/prometheus-collector.service.j2 (Normal file, 9 lines; filename restored from tasks/collectors.yml above, the diff viewer dropped the header)
@@ -0,0 +1,9 @@
{{ ansible_managed | comment }}

[Unit]
Description=Collect {{ item.key }} metrics for prometheus-node-exporter

[Service]
Type=oneshot
Environment=TMPDIR=/var/lib/prometheus/node-exporter
ExecStart=/bin/bash -c "/usr/local/bin/{{ basename }} | sponge /var/lib/prometheus/node-exporter/{{ item.key }}.prom"
roles/prometheus_node/templates/prometheus-collector.timer.j2 (Normal file, 11 lines; filename restored from tasks/collectors.yml above, the diff viewer dropped the header)
@@ -0,0 +1,11 @@
{{ ansible_managed | comment }}

[Unit]
Description=Collect {{ item.key }} metrics for prometheus-node-exporter every {{ item.value.interval }}

[Timer]
OnBootSec=0
OnUnitActiveSec={{ item.value.interval }}

[Install]
WantedBy=timers.target
roles/prometheus_node/templates/prometheus-node-exporter.default.j2 (Normal file, 3 lines; filename restored from tasks/config.yml above, the diff viewer dropped the header)
@@ -0,0 +1,3 @@
{{ ansible_managed | comment }}

ARGS="{% for k, v in args.items() %}--{{ k }}=\"{{ v }}\" {% endfor %}"
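A worked example for this template: given an override like (values illustrative; the 9101 port matches the UCS firewall rules above)

    prometheus_node_exporter_args:
      collector.textfile.directory: /var/lib/prometheus/node-exporter
      web.listen-address: ':9101'

the rendered /etc/default/prometheus-node-exporter would contain roughly:

    ARGS="--collector.textfile.directory=\"/var/lib/prometheus/node-exporter\" --web.listen-address=\":9101\" "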
roles/prometheus_node_openbsd/defaults/main.yml (Normal file, 4 lines)
@@ -0,0 +1,4 @@
---

node_exporter_textfile_dir: /var/node_exporter
node_exporter_flags: ""
roles/prometheus_node_openbsd/handlers/main.yml (Normal file, 7 lines)
@@ -0,0 +1,7 @@
---

- name: start node exporter
  command: /usr/sbin/rcctl start node_exporter

- name: restart node exporter
  command: /usr/sbin/rcctl restart node_exporter
roles/prometheus_node_openbsd/tasks/install.yml (Normal file, 22 lines)
@@ -0,0 +1,22 @@
---

- name: install prometheus node exporter
  openbsd_pkg:
    name: node_exporter-0.18.0

- name: create textfile collector directory
  file:
    path: "{{ node_exporter_textfile_dir }}"
    state: directory
    owner: root
    group: _nodeexporter
    mode: 0750

- name: enable node exporter
  lineinfile:
    path: /etc/rc.conf.local
    line: "node_exporter_flags=--collector.textfile.directory={{ node_exporter_textfile_dir }} {{ node_exporter_flags }}"
    regexp: ^node_exporter_flags=.*$
  notify:
    - start node exporter
    - restart node exporter
roles/prometheus_node_openbsd/tasks/main.yml (Normal file, 13 lines)
@@ -0,0 +1,13 @@
---

- name: install prometheus node exporter
  import_tasks: install.yml
  tags:
    - 'role::prometheus_node_openbsd'
    - 'role::prometheus_node_openbsd:install'

- name: install syspatch check
  import_tasks: syspatch.yml
  tags:
    - 'role::prometheus_node_openbsd'
    - 'role::prometheus_node_openbsd:syspatch'
roles/prometheus_node_openbsd/tasks/syspatch.yml (Normal file, 15 lines)
@@ -0,0 +1,15 @@
---

- name: create syspatch check script
  template:
    src: syspatch.sh.j2
    dest: /usr/local/bin/prometheus-node-exporter-syspatch.sh
    owner: root
    group: bin
    mode: 0755

- name: register syspatch check script in cron
  lineinfile:
    path: /etc/daily.local
    regexp: '^/usr/local/bin/prometheus-node-exporter-syspatch.sh$'
    line: '/usr/local/bin/prometheus-node-exporter-syspatch.sh'
roles/prometheus_node_openbsd/templates/syspatch.sh.j2 (Normal file, 16 lines)
@@ -0,0 +1,16 @@
#!/bin/sh
{{ ansible_managed | comment }}

N_PATCHES="$(/usr/sbin/syspatch -c | wc -l)"
TMPFILE="$(/usr/bin/mktemp /tmp/prometheus-node-exporter-syspatch.XXXXXX)"

/bin/cat > "${TMPFILE}" <<EOF
# TYPE syspatch_upgrades_pending gauge
# HELP syspatch_upgrades_pending Number of available system patches
syspatch_upgrades_pending ${N_PATCHES}
EOF

/sbin/chown _nodeexporter:_nodeexporter "${TMPFILE}"
/bin/chmod 0640 "${TMPFILE}"
# Atomic move
/bin/mv "$TMPFILE" "{{ node_exporter_textfile_dir }}/syspatch.prom"