DEV-0000 rework blackbox exporter

main
Ketelsen, Sven 2 years ago
parent 5ce2d20d07
commit 59c22626f4

@@ -896,7 +896,7 @@ run-setup-digitalocean:
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
- ssh-add -L
- ansible-playbook -i stage-digitalocean setup.yml --vault-password-file /tmp/vault-pass -t common -u gitlabci
- ansible-playbook -i stage-digitalocean external_monitoring.yml --vault-password-file /tmp/vault-pass -t common -u gitlabci
- ansible-playbook -i stage-digitalocean smardigo.yml --vault-password-file /tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
only:

@@ -1,86 +0,0 @@
---
- name: 'apply setup to {{ host | default("all") }}'
hosts: '{{ host | default("all") }}'
serial: "{{ serial_number | default(5) }}"
become: yes
tasks:
- name: "Set VARs"
set_fact:
prometheus_endpoints_all_stages:
- "{{ lookup('community.general.dig', 'devnso-prometheus-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-prometheus-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-prometheus-01.' + domain ) }}"
k8s_nodes_devnso:
- "{{ lookup('community.general.dig', 'devnso-kube-node-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'devnso-kube-node-02.' + domain ) }}"
- "{{ lookup('community.general.dig', 'devnso-kube-node-03.' + domain ) }}"
- "{{ lookup('community.general.dig', 'devnso-kube-node-04.' + domain ) }}"
- "{{ lookup('community.general.dig', 'devnso-kube-node-05.' + domain ) }}"
k8s_nodes_qanso:
- "{{ lookup('community.general.dig', 'qanso-kube-node-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-kube-node-02.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-kube-node-03.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-kube-node-04.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-kube-node-05.' + domain ) }}"
k8s_nodes_prodnso:
- "{{ lookup('community.general.dig', 'prodnso-kube-node-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-kube-node-02.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-kube-node-03.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-kube-node-04.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-kube-node-05.' + domain ) }}"
k8s_nodes_mobene:
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-02.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-03.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-04.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-05.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-06.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-07.' + domain ) }}"
- name: "Allow SSH in UFW"
ufw:
rule: limit
port: 22
proto: tcp
src: "{{ item }}"
loop: "{{ ip_whitelist }}"
- name: "Allow port 9100 for node-exporter in UFW"
ufw:
rule: allow
port: 9100
proto: tcp
src: "{{ item }}"
loop: "{{ prometheus_endpoints_all_stages }}"
- name: "Allow port 9115 for blackbox-exporter in UFW"
ufw:
rule: allow
port: 9115
proto: tcp
src: "{{ item }}"
loop: "{{ prometheus_endpoints_all_stages + ip_whitelist + k8s_nodes_mobene + k8s_nodes_devnso + k8s_nodes_qanso + k8s_nodes_prodnso + k8s_nodes_demompmx }}"
- name: "Set firewall default policy"
ufw:
state: enabled
policy: reject
- name: "configure ssh_hardening"
include_role:
# include role from collection called 'devsec'
name: devsec.hardening.ssh_hardening
apply:
tags:
- ssh_hardening
tags:
- ssh_hardening
- name: "Install blackbox-exporter via include_role"
include_role:
name: cloudalchemy.blackbox-exporter
apply:
tags:
- blackbox
tags:
- blackbox

@@ -1,16 +1,9 @@
---
docker_enabled: false
traefik_enabled: false
filebeat_enabled: false
metricbeat_enabled: false
monitor_port_system: 9100
blackbox_exporter_version: 0.23.0
#blackbox_exporter_cli_flags:
# log.level: "debug"
blackbox_exporter_version: "{{ prom_blackbox_exporter_version }}"
blackbox_exporter_configuration_modules:
http_2xx:
http:
@@ -18,25 +11,21 @@ blackbox_exporter_configuration_modules:
valid_status_codes: []
prober: http
timeout: 5s
http_3xx:
http:
method: GET
valid_status_codes:
- 301
- 302
- 303
- 304
- 305
prober: http
timeout: 5s
http_4xx:
http:
method: GET
valid_status_codes:
- 401
- 402
- 403
- 404
- 405
prober: http
timeout: 5s
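
For orientation: assuming the cloudalchemy.blackbox-exporter role renders `blackbox_exporter_configuration_modules` more or less verbatim under a top-level `modules:` key (as the hand-maintained blackbox.yml removed later in this commit did), the new 3xx/4xx entries should end up in the exporter config roughly like this sketch:

    modules:
      http_3xx:
        prober: http
        timeout: 5s
        http:
          method: GET
          valid_status_codes: [301, 302, 303, 304, 305]
      http_4xx:
        prober: http
        timeout: 5s
        http:
          method: GET
          valid_status_codes: [401, 402, 403, 404, 405]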

@@ -76,7 +76,7 @@ hcloud_firewall_objects:
protocol: tcp
port: '443'
source_ips:
- "{{ lookup('community.general.dig', 'dev-blackbox-01.smardigo.digital' ) }}/32"
- "{{ lookup('community.general.dig', '{{ shared_service_hostname_blackbox_exporter }}' ) }}/32"
destination_ips: []
description: "Allow access to Blackbox Monitoring for whitelisted ips"
apply_to:

@@ -170,6 +170,7 @@ service_port_sonarqube: "9000"
service_port_pgadmin: "9001"
service_port_phpmyadmin: "9002"
service_port_node_exporter: "9100"
service_port_blackbox_exporter: "9115"
service_port_elasticsearch: "9200"
service_port_wireguard: "51820"

@@ -2,19 +2,15 @@
# node exporter exposes data only into the private network
node_exporter_listen_address: "{{ stage_private_server_ip }}"
# TODO the blackbox exporter shouldn't be DEV tagged at all
blackbox_exporter_fqdn: "dev-blackbox-01.{{ domain }}"
blackbox_http_2xx_targets:
- "{{ shared_service_kube_url_awx }}"
- "{{ shared_service_url_gitea }}"
- "{{ shared_service_url_harbor }}"
- "{{ shared_service_url_keycloak }}/auth/"
- "{{ shared_service_url_kibana }}"
blackbox_http_4xx_targets:
- "{{ shared_service_kube_url_awx }}"
- "{{ shared_service_url_management }}"
blackbox_http_2xx_additional_targets: []
prometheus_tsdb_rentention_time: "4w"
prometheus_federation_enabled: true
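
The reworked scrape template further down in this commit reads every per-module target list through `| default([])`, so a stage only needs to define the lists it actually probes. A hypothetical stage-level addition for 3xx checks could look like the sketch below; the variable names come from the template, the target value is purely illustrative:

    # illustrative only: enable 3xx probing for one redirecting endpoint
    blackbox_http_3xx_targets:
      - "{{ shared_service_url_gitea }}"
    # the *_additional_targets lists may stay undefined; the template defaults them to []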

@@ -22,6 +22,9 @@ shared_service_hostname_grafana: "{{ stage }}-prometheus-01-grafana.{{ domain_en
shared_service_hostname_logstash: "{{ stage }}-elastic-stack-logstash-01"
# TODO the blackbox exporter shouldn't be DEV tagged at all
shared_service_hostname_blackbox_exporter: "devnso-blackbox-01.smardigo.digital"
# use private loadbalancer ip for all kubernetes services
stage_kube: "{{ stage }}"
shared_service_kube_url_argocd: "https://{{ shared_service_kube_hostname_argocd }}"

@@ -17,7 +17,7 @@ keycloak_version: "21.0.2.7"
pgadmin4_version: "7.4"
prom_alertmanager_version: "v0.25.0"
prom_blackbox_exporter_version: "v0.24.0"
prom_blackbox_exporter_version: "0.24.0"
prom_prometheus_version: "v2.44.0"
prom_prom2teams_version: "3.2.3" # TODO 4.2.1

@@ -76,7 +76,7 @@ hcloud_firewall_objects:
protocol: tcp
port: '443'
source_ips:
- "{{ lookup('community.general.dig', 'dev-blackbox-01.smardigo.digital' ) }}/32"
- "{{ lookup('community.general.dig', '{{ shared_service_hostname_blackbox_exporter }}' ) }}/32"
destination_ips: []
description: null
apply_to:

@@ -0,0 +1,9 @@
---
stage_server_infos: []
docker_enabled: false
traefik_enabled: false
filebeat_enabled: false
metricbeat_enabled: false
monitor_port_system: "{{ service_port_node_exporter }}"

@@ -72,15 +72,12 @@
when:
- "'hcloud' in group_names"
tasks:
- name: "Create server in DO-cloud via include_tasks"
include_role:
name: digitalocean
tasks_from: _create_server
- role: digitalocean
vars:
droplet:
name: dev-blackbox-01
name: devnso-blackbox-01
tags:
- stage_devnso
- service_blackbox
- stage_dev
when: "'digitalocean' in group_names"
when:
- "'digitalocean' in group_names"

@@ -0,0 +1,90 @@
---
- name: "Getting ips for all monitoring servers"
set_fact:
prometheus_endpoints:
- "{{ lookup('community.general.dig', 'devnso-prometheus-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-prometheus-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-prometheus-01.' + domain ) }}"
vpn_nodes:
- "{{ lookup('community.general.dig', 'devnso-vpn-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-vpn-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-vpn-01.' + domain ) }}"
k8s_nodes_devnso:
- "{{ lookup('community.general.dig', 'devnso-kube-node-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'devnso-kube-node-02.' + domain ) }}"
- "{{ lookup('community.general.dig', 'devnso-kube-node-03.' + domain ) }}"
- "{{ lookup('community.general.dig', 'devnso-kube-node-04.' + domain ) }}"
- "{{ lookup('community.general.dig', 'devnso-kube-node-05.' + domain ) }}"
k8s_nodes_qanso:
- "{{ lookup('community.general.dig', 'qanso-kube-node-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-kube-node-02.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-kube-node-03.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-kube-node-04.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qanso-kube-node-05.' + domain ) }}"
k8s_nodes_prodnso:
- "{{ lookup('community.general.dig', 'prodnso-kube-node-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-kube-node-02.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-kube-node-03.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-kube-node-04.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-kube-node-05.' + domain ) }}"
k8s_nodes_mobene:
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-02.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-03.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-04.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-05.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-06.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodwork01-kube-node-07.' + domain ) }}"
k8s_nodes_demompmx:
- "{{ lookup('community.general.dig', 'demompmx-kube-node-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'demompmx-kube-node-02.' + domain ) }}"
- "{{ lookup('community.general.dig', 'demompmx-kube-node-03.' + domain ) }}"
- "{{ lookup('community.general.dig', 'demompmx-kube-node-04.' + domain ) }}"
- "{{ lookup('community.general.dig', 'demompmx-kube-node-05.' + domain ) }}"
- name: "Allow SSH in UFW"
ufw:
rule: limit
port: 22
proto: tcp
src: "{{ item }}"
loop: "{{ ip_whitelist_netgo + vpn_nodes }}"
- name: "Allow node-exporter in UFW with port <{{ service_port_node_exporter }}>"
ufw:
rule: allow
port: "{{ service_port_node_exporter }}"
proto: tcp
src: "{{ item }}"
loop: "{{ prometheus_endpoints }}"
- name: "Allow blackbox-exporter in UFW with port <{{ service_port_blackbox_exporter }}>"
ufw:
rule: allow
port: "{{ service_port_blackbox_exporter }}"
proto: tcp
src: "{{ item }}"
loop: "{{ prometheus_endpoints + k8s_nodes_mobene + k8s_nodes_devnso + k8s_nodes_qanso + k8s_nodes_prodnso + k8s_nodes_demompmx }}"
- name: "Set firewall default policy"
ufw:
state: enabled
policy: reject
- name: "configure ssh_hardening"
include_role:
name: devsec.hardening.ssh_hardening
apply:
tags:
- ssh_hardening
tags:
- ssh_hardening
- name: "Install blackbox-exporter via include_role"
include_role:
name: cloudalchemy.blackbox-exporter
apply:
tags:
- blackbox
tags:
- blackbox

@@ -1,5 +1,4 @@
---
- name: "Create ssh key"
delegate_to: localhost
community.digitalocean.digital_ocean_sshkey:

@@ -72,3 +72,6 @@
- role: nginx
when: "'nginx' in group_names"
- role: blackbox
when: "'blackbox' in group_names"

@@ -1,5 +1,5 @@
[blackbox]
dev-blackbox-01
devnso-blackbox-01
[stage_devnso:children]
blackbox

@@ -1,12 +0,0 @@
modules:
http_200:
prober: http
timeout: 5s
http:
method: GET
fail_if_ssl: false
fail_if_not_ssl: false
tls_config:
insecure_skip_verify: false
preferred_ip_protocol: "ip4"
valid_status_codes: [200]

@@ -83,12 +83,12 @@ scrape_configs:
application: prom2teams
{% endif %}
- job_name: 'blackbox'
- job_name: 'blackbox_2xx'
metrics_path: /probe
params:
module: [http_2xx]
static_configs:
- targets: {{ blackbox_http_2xx_targets + blackbox_http_2xx_additional_targets }}
- targets: {{ (blackbox_http_2xx_targets | default([])) + (blackbox_http_2xx_additional_targets | default([])) }}
labels:
env: {{ stage }}
project: monitoring
@@ -99,7 +99,43 @@ scrape_configs:
- source_labels: [__param_target]
target_label: instance
- target_label: __address__
replacement: "{{ blackbox_exporter_fqdn }}:9115"
replacement: "{{ shared_service_hostname_blackbox_exporter }}:{{ service_port_blackbox_exporter }}"
- job_name: 'blackbox_3xx'
metrics_path: /probe
params:
module: [http_3xx]
static_configs:
- targets: {{ (blackbox_http_3xx_targets | default([])) + (blackbox_http_3xx_additional_targets | default([])) }}
labels:
env: {{ stage }}
project: monitoring
application: blackbox
relabel_configs:
- source_labels: [__address__]
target_label: __param_target
- source_labels: [__param_target]
target_label: instance
- target_label: __address__
replacement: "{{ shared_service_hostname_blackbox_exporter }}:{{ service_port_blackbox_exporter }}"
- job_name: 'blackbox_4xx'
metrics_path: /probe
params:
module: [http_4xx]
static_configs:
- targets: {{ (blackbox_http_4xx_targets | default([])) + (blackbox_http_4xx_additional_targets | default([])) }}
labels:
env: {{ stage }}
project: monitoring
application: blackbox
relabel_configs:
- source_labels: [__address__]
target_label: __param_target
- source_labels: [__param_target]
target_label: instance
- target_label: __address__
replacement: "{{ shared_service_hostname_blackbox_exporter }}:{{ service_port_blackbox_exporter }}"
############################################
### Traefik ####
@@ -437,7 +473,7 @@ scrape_configs:
metrics_path: '/metrics'
static_configs:
- targets:
- '{{ blackbox_exporter_fqdn }}:9100'
- '{{ shared_service_hostname_blackbox_exporter }}:{{ service_port_node_exporter }}'
labels:
env: {{ stage }}
project: servers
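
All three blackbox_* jobs expose the exporter's standard probe metrics, so a single alerting rule can cover 2xx, 3xx and 4xx targets at once. A minimal sketch, assuming the usual `probe_success` metric and an illustrative rule name and duration (none of this is part of the commit):

    groups:
      - name: blackbox
        rules:
          - alert: BlackboxProbeFailed
            # probe_success is 1 when the configured module accepted the response, 0 otherwise
            expr: probe_success == 0
            for: 5m
            labels:
              project: monitoring
            annotations:
              summary: "Blackbox probe failed for {{ $labels.instance }} (job {{ $labels.job }})"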
