feat: added connect/keycloak setup

master
Sven Ketelsen 5 years ago
parent 3077579fd6
commit 53352252da

2
.gitignore vendored

@ -1,3 +1,5 @@
.project .project
/vault-pass /vault-pass
image.tar.gz

@ -27,3 +27,11 @@ Create/Start servers for stage-dev
# TODO # TODO
212.121.131.106 - Siemansdamm - IPFire 212.121.131.106 - Siemansdamm - IPFire
Keycloak
Read Flow for Docker
Get ID by 'docker auth'
Update Client 'docker-registry'
Download Installation
Docker-Registry
Use Installation from Keycloak Client 'docker-registry'

@ -7,10 +7,7 @@ domain: smardigo.digital
use_ssl: true use_ssl: true
http_s: "http{{ use_ssl | ternary('s', '', omit) }}" http_s: "http{{ use_ssl | ternary('s', '', omit) }}"
service_prefix: ''
service_suffix: ''
service_name: "{{ inventory_hostname }}" service_name: "{{ inventory_hostname }}"
stage_server_name: "{{ inventory_hostname }}" stage_server_name: "{{ inventory_hostname }}"
stage_server_hostname: "{{ inventory_hostname }}" stage_server_hostname: "{{ inventory_hostname }}"
stage_server_url_host: "{{ stage_server_name }}.{{ domain }}" stage_server_url_host: "{{ stage_server_name }}.{{ domain }}"

@ -3,3 +3,19 @@
stage: "dev" stage: "dev"
alertmanager_channel_smardigo: "#monitoring-qa" alertmanager_channel_smardigo: "#monitoring-qa"
# TODO read configuration with hetzner rest api
filebeat_extra_hosts: [
{
hostname: logstash-dev-elastic-stack-01,
ip: 10.0.0.2,
},
{
hostname: logstash-dev-elastic-stack-02,
ip: 10.0.0.3
},
{
hostname: logstash-dev-elastic-stack-03,
ip: 10.0.0.4,
},
]

@ -0,0 +1,7 @@
---
connect_auth_module: oidc
connect_oidc_client_id: connect-01
connect_oidc_client_secret: 9e234965-1041-4653-8a0e-db964c04bc26
connect_oidc_registration_id: connect-01
connect_oidc_issuer_uri: https://dev-keycloak-01.smardigo.digital/auth/realms/smardigo-01

@ -0,0 +1,7 @@
---
connect_auth_module: oidc
connect_oidc_client_id: connect-02
connect_oidc_client_secret: 9e234965-1041-4653-8a0e-db964c04bc26
connect_oidc_registration_id: connect-02
connect_oidc_issuer_uri: https://dev-keycloak-01.smardigo.digital/auth/realms/smardigo-01

@ -0,0 +1,7 @@
---
connect_auth_module: oidc
connect_oidc_client_id: connect-03
connect_oidc_client_secret: 9e234965-1041-4653-8a0e-db964c04bc26
connect_oidc_registration_id: connect-03
connect_oidc_issuer_uri: https://dev-keycloak-01.smardigo.digital/auth/realms/smardigo-01

@ -0,0 +1,7 @@
---
connect_auth_module: oidc
connect_oidc_client_id: connect-04
connect_oidc_client_secret: 9e234965-1041-4653-8a0e-db964c04bc26
connect_oidc_registration_id: connect-04
connect_oidc_issuer_uri: https://dev-keycloak-01.smardigo.digital/auth/realms/smardigo-01

@ -1,11 +1,12 @@
--- ---
hetzner_server_type: cx21 hetzner_server_type: cx31
hetzner_ssh_keys: hetzner_ssh_keys:
- stefan@curow.de - stefan@curow.de
- sven.ketelsen@arxes-tolina.de - sven.ketelsen@arxes-tolina.de
smardigo_plattform_users: smardigo_plattform_users:
- 'elastic'
- 'stefan.curow' - 'stefan.curow'
- 'sven.ketelsen' - 'sven.ketelsen'

@ -1,11 +1,12 @@
--- ---
hetzner_server_type: cx21 hetzner_server_type: cx31
hetzner_ssh_keys: hetzner_ssh_keys:
- stefan@curow.de - stefan@curow.de
- sven.ketelsen@arxes-tolina.de - sven.ketelsen@arxes-tolina.de
smardigo_plattform_users: smardigo_plattform_users:
- 'elastic'
- 'stefan.curow' - 'stefan.curow'
- 'sven.ketelsen' - 'sven.ketelsen'

@ -1,11 +1,12 @@
--- ---
hetzner_server_type: cx21 hetzner_server_type: cx31
hetzner_ssh_keys: hetzner_ssh_keys:
- stefan@curow.de - stefan@curow.de
- sven.ketelsen@arxes-tolina.de - sven.ketelsen@arxes-tolina.de
smardigo_plattform_users: smardigo_plattform_users:
- 'elastic'
- 'stefan.curow' - 'stefan.curow'
- 'sven.ketelsen' - 'sven.ketelsen'

@ -0,0 +1,162 @@
---
keycloak: {
realms: [
{
name: 'smardigo-01',
display_name: 'smardigo-01',
users: [
{
"username": "connect-admin",
"password": "connect-admin",
}
],
clients: [
{
clientId: 'connect-01',
name: 'connect-01',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://dev-connect-01.smardigo.digital/*",
"http://dev-connect-01.smardigo.digital/*",
]',
secret: '9e234965-1041-4653-8a0e-db964c04bc26',
web_origins: '
[
"https://dev-connect-01.smardigo.digital",
]',
},
{
clientId: 'connect-02',
name: 'connect-02',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://dev-connect-02.smardigo.digital/*",
"http://dev-connect-02.smardigo.digital/*",
]',
secret: '9e234965-1041-4653-8a0e-db964c04bc26',
web_origins: '
[
"https://dev-connect-02.smardigo.digital",
]',
},
{
clientId: 'connect-03',
name: 'connect-03',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://dev-connect-03.smardigo.digital/*",
"http://dev-connect-03.smardigo.digital/*",
]',
secret: '9e234965-1041-4653-8a0e-db964c04bc26',
web_origins: '
[
"https://dev-connect-03.smardigo.digital",
]',
},
{
clientId: 'connect-04',
name: 'connect-04',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://dev-connect-04.smardigo.digital/*",
"http://dev-connect-04.smardigo.digital/*",
]',
secret: '9e234965-1041-4653-8a0e-db964c04bc26',
web_origins: '
[
"https://dev-connect-04.smardigo.digital",
]',
},
{
clientId: 'connect-05',
name: 'connect-05',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://dev-connect-05.smardigo.digital/*",
"http://dev-connect-05.smardigo.digital/*",
]',
secret: '9e234965-1041-4653-8a0e-db964c04bc26',
web_origins: '
[
"https://dev-connect-05.smardigo.digital",
]',
},
{
clientId: 'connect-06',
name: 'connect-06',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://dev-connect-06.smardigo.digital/*",
"http://dev-connect-06.smardigo.digital/*",
]',
secret: '9e234965-1041-4653-8a0e-db964c04bc26',
web_origins: '
[
"https://dev-connect-06.smardigo.digital",
]',
},
{
clientId: 'connect-07',
name: 'connect-07',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://dev-connect-07.smardigo.digital/*",
"http://dev-connect-07.smardigo.digital/*",
]',
secret: '9e234965-1041-4653-8a0e-db964c04bc26',
web_origins: '
[
"https://dev-connect-07.smardigo.digital",
]',
},
{
clientId: 'connect-08',
name: 'connect-08',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://dev-connect-08.smardigo.digital/*",
"http://dev-connect-08.smardigo.digital/*",
]',
secret: '9e234965-1041-4653-8a0e-db964c04bc26',
web_origins: '
[
"https://dev-connect-08.smardigo.digital",
]',
},
{
clientId: 'connect-09',
name: 'connect-09',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://dev-connect-09.smardigo.digital/*",
"http://dev-connect-09.smardigo.digital/*",
]',
secret: '9e234965-1041-4653-8a0e-db964c04bc26',
web_origins: '
[
"https://dev-connect-09.smardigo.digital",
]',
}
]
}
]
}

@ -2,7 +2,7 @@
- name: 'apply setup to {{ host | default("all") }}' - name: 'apply setup to {{ host | default("all") }}'
hosts: '{{ host | default("all") }}' hosts: '{{ host | default("all") }}'
serial: "{{ serial_number | default(5) }}" serial: "{{ serial_number | default(1) }}"
gather_facts: no gather_facts: no
become: no become: no

@ -20,27 +20,6 @@
when: when:
- send_status_messages - send_status_messages
- name: Gather current server infos
hcloud_server_info:
api_token: "{{ hetzner_authentication_token }}"
register: hetzner_server_infos
delegate_to: 127.0.0.1
become: false
- name: Save current server infos as variable (fact)
set_fact:
hetzner_server_infos_json: "{{ hetzner_server_infos.hcloud_server_info }}"
delegate_to: 127.0.0.1
become: false
- name: Read ip for {{ inventory_hostname }}
set_fact:
stage_server_ip: "{{ item.ipv4_address }}"
when: item.name == inventory_hostname
with_items: "{{ hetzner_server_infos_json }}"
delegate_to: 127.0.0.1
become: false
- name: 'Insert/Update ssh config in ~/.ssh/config' - name: 'Insert/Update ssh config in ~/.ssh/config'
blockinfile: blockinfile:
marker: '# {mark} managed by ansible (ssh config for {{ inventory_hostname }})' marker: '# {mark} managed by ansible (ssh config for {{ inventory_hostname }})'
@ -100,7 +79,7 @@
state: present state: present
exclusive: true exclusive: true
key: "{{ lookup('file', '{{ inventory_dir }}/keys/{{ item }}/id_rsa.pub') }}" key: "{{ lookup('file', '{{ inventory_dir }}/keys/{{ item }}/id_rsa.pub') }}"
loop: '{{ smardigo_plattform_users }}' loop: '{{ smardigo_plattform_users | difference(["elastic"]) }}'
tags: tags:
- users - users

@ -1,6 +1,6 @@
--- ---
connect_image_name: docker.arxes-tolina.de/smardigo/connect-whitelabel-app connect_image_name: docker.dev-at.de/smardigo/connect-whitelabel-app
connect_version: '7.1.0-SNAPSHOT' connect_version: '7.1.0-SNAPSHOT'
connect_admin_username: "connect-admin" connect_admin_username: "connect-admin"
@ -20,14 +20,14 @@ connect_postgres_id: "{{ service_name }}-postgres-connect"
connect_labels: [ connect_labels: [
'"traefik.enable=true"', '"traefik.enable=true"',
'"traefik.http.routers.{{ connect_id }}.service={{ connect_id }}"', '"traefik.http.routers.{{ connect_id }}.service={{ connect_id }}"',
'"traefik.http.routers.{{ connect_id }}.rule=Host(`{{ connect_id }}.{{ domain }}`)"', '"traefik.http.routers.{{ connect_id }}.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ connect_id }}.entrypoints=websecure"', '"traefik.http.routers.{{ connect_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ connect_id }}.tls=true"', '"traefik.http.routers.{{ connect_id }}.tls=true"',
'"traefik.http.routers.{{ connect_id }}.tls.certresolver=letsencrypt"', '"traefik.http.routers.{{ connect_id }}.tls.certresolver=letsencrypt"',
'"traefik.http.services.{{ connect_id }}.loadbalancer.server.port={{ service_port }}"', '"traefik.http.services.{{ connect_id }}.loadbalancer.server.port={{ service_port }}"',
'"traefik.http.routers.{{ connect_id }}-admin.service={{ connect_id }}-admin"', '"traefik.http.routers.{{ connect_id }}-admin.service={{ connect_id }}-admin"',
'"traefik.http.routers.{{ connect_id }}-admin.rule=Host(`{{ connect_id }}.{{ domain }}`)"', '"traefik.http.routers.{{ connect_id }}-admin.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ connect_id }}-admin.entrypoints=admin-service"', '"traefik.http.routers.{{ connect_id }}-admin.entrypoints=admin-service"',
'"traefik.http.routers.{{ connect_id }}-admin.tls=true"', '"traefik.http.routers.{{ connect_id }}-admin.tls=true"',
'"traefik.http.routers.{{ connect_id }}-admin.tls.certresolver=letsencrypt"', '"traefik.http.routers.{{ connect_id }}-admin.tls.certresolver=letsencrypt"',
@ -38,7 +38,7 @@ connect_labels: [
'"traefik.http.services.{{ connect_id }}-admin.loadbalancer.server.port={{ management_port }}"', '"traefik.http.services.{{ connect_id }}-admin.loadbalancer.server.port={{ management_port }}"',
'"traefik.http.routers.{{ connect_id }}-monitor.service={{ service_name }}-node-exporter"', '"traefik.http.routers.{{ connect_id }}-monitor.service={{ service_name }}-node-exporter"',
'"traefik.http.routers.{{ connect_id }}-monitor.rule=Host(`{{ connect_id }}.{{ domain }}`)"', '"traefik.http.routers.{{ connect_id }}-monitor.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ connect_id }}-monitor.entrypoints=admin-system"', '"traefik.http.routers.{{ connect_id }}-monitor.entrypoints=admin-system"',
'"traefik.http.routers.{{ connect_id }}-monitor.tls=true"', '"traefik.http.routers.{{ connect_id }}-monitor.tls=true"',
'"traefik.http.routers.{{ connect_id }}-monitor.tls.certresolver=letsencrypt"', '"traefik.http.routers.{{ connect_id }}-monitor.tls.certresolver=letsencrypt"',

@ -18,34 +18,13 @@
when: when:
- send_status_messages - send_status_messages
- name: Gather current server infos
hcloud_server_info:
api_token: "{{ hetzner_authentication_token }}"
register: hetzner_server_infos
delegate_to: 127.0.0.1
become: false
- name: Save current server infos as variable (fact)
set_fact:
hetzner_server_infos_json: "{{ hetzner_server_infos.hcloud_server_info }}"
delegate_to: 127.0.0.1
become: false
- name: Read ip for {{ inventory_hostname }}
set_fact:
stage_server_ip: "{{ item.ipv4_address }}"
when: item.name == inventory_hostname
with_items: "{{ hetzner_server_infos_json }}"
delegate_to: 127.0.0.1
become: false
- name: "Setup DNS configuration for {{ service_name }} connect" - name: "Setup DNS configuration for {{ service_name }} connect"
include_role: include_role:
name: _digitalocean name: _digitalocean
tasks_from: domain tasks_from: domain
vars: vars:
record_data: "{{ stage_server_ip }}" record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}-connect" record_name: "{{ service_name }}"
- name: "Check if {{ service_name }}/docker-compose.yml exists" - name: "Check if {{ service_name }}/docker-compose.yml exists"
stat: stat:

@ -1,137 +0,0 @@
---
docker_registry_id: "{{ service_name }}-registry"
docker_registry_image_name: "library/registry"
docker_registry_image_version: "2.7"
docker_portus_secret_key_base: docker-portus-secret-key-base
docker_portus_password: docker-portus-admin
docker_postgres_portus_image_name: "postgres"
docker_postgres_portus_image_version: "12"
docker_portus_postgres_database: docker-portus-postgres
docker_portus_postgres_username: docker-portus-postgres-admin
docker_portus_postgres_password: docker-portus-postgres-admin
docker_registry_docker: {
networks: [
{
name: front-tier,
external: true,
},
{
name: back-tier,
external: true,
},
],
volumes: [
{
name: "{{ service_name }}-registry-data",
},
{
name: "{{ service_name }}-postgres-portus-data"
}
],
services: [
{
name: "{{ service_name }}-portus",
image_name: "opensuse/portus",
image_version: "2.4",
environment: [
"PORTUS_MACHINE_FQDN_VALUE: \"{{ stage_server_url_host }}\"",
"PORTUS_DB_HOST: \"{{ service_name }}-postgres-portus\"",
"PORTUS_DB_DATABASE: \"{{ docker_portus_postgres_database }}\"",
"PORTUS_DB_USERNAME: \"{{ docker_portus_postgres_username }}\"",
"PORTUS_DB_PASSWORD: \"{{ docker_portus_postgres_password }}\"",
"PORTUS_DB_POOL: \"5\"",
"PORTUS_SECRET_KEY_BASE: \"{{ docker_portus_secret_key_base }}\"",
"PORTUS_KEY_PATH: \"/certificates/portus.key\"",
"PORTUS_PASSWORD: \"{{ docker_portus_password }}\"",
"PORTUS_PUMA_TLS_KEY: \"/certificates/portus.key\"",
"PORTUS_PUMA_TLS_CERT: \"/certificates/portus.crt\"",
"RAILS_SERVE_STATIC_FILES: \"true\"",
],
volumes: [
'"{{ service_name }}-postgres-portus-data:/var/lib/postgresql/data"',
],
networks: [
'"front-tier"',
'"back-tier"',
]
},
{
name: "{{ service_name }}-portus-background",
image_name: "opensuse/portus",
image_version: "2.4",
environment: [
"CCONFIG_PREFIX: \"PORTUS\"",
"PORTUS_MACHINE_FQDN_VALUE: \"{{ stage_server_url_host }}\"",
"PORTUS_DB_HOST: \"{{ service_name }}-postgres-portus\"",
"PORTUS_DB_DATABASE: \"{{ docker_portus_postgres_database }}\"",
"PORTUS_DB_USERNAME: \"{{ docker_portus_postgres_username }}\"",
"PORTUS_DB_PASSWORD: \"{{ docker_portus_postgres_password }}\"",
"PORTUS_DB_POOL: \"5\"",
"PORTUS_SECRET_KEY_BASE: \"{{ docker_portus_secret_key_base }}\"",
"PORTUS_KEY_PATH: \"/certificates/portus.key\"",
"PORTUS_PASSWORD: \"{{ docker_portus_password }}\"",
"PORTUS_BACKGROUND: \"true\"",
],
volumes: [
'"./secrets:/certificates:ro"',
],
networks: [
'"back-tier"',
]
},
{
name: "{{ service_name }}-postgres-portus",
image_name: "{{ docker_postgres_portus_image_name }}",
image_version: "{{ docker_postgres_portus_image_version }}",
environment: [
'POSTGRES_DB: "{{ docker_portus_postgres_database }}"',
'POSTGRES_USER: "{{ docker_portus_postgres_username }}"',
'POSTGRES_PASSWORD: "{{ docker_portus_postgres_password }}"',
],
volumes: [
'"{{ service_name }}-postgres-portus-data:/var/lib/postgresql/data"',
],
networks: [
'"back-tier"',
],
ports: "{{ docker_registry_postgres_ports | default([]) }}",
},
{
name: "{{ service_name }}-registry",
image_name: "{{ docker_registry_image_name }}",
image_version: "{{ docker_registry_image_version }}",
command: [
'"/bin/sh"',
'"/etc/docker/registry/init"',
],
environment: [
"REGISTRY_HTTP_SECRET: \"3a025df1-c7df-4c63-9ec4-103ffe3bde42\"",
"REGISTRY_AUTH_TOKEN_REALM: \"{{ stage_server_url }}/v2/token\"",
"REGISTRY_AUTH_TOKEN_SERVICE: \"{{ stage_server_url_host }}\"",
"REGISTRY_AUTH_TOKEN_ISSUER: \"{{ stage_server_url_host }}\"",
"REGISTRY_AUTH_TOKEN_ROOTCERTBUNDLE: \"/secrets/portus.crt\"",
"REGISTRY_HTTP_TLS_CERTIFICATE: \"/secrets/portus.crt\"",
"REGISTRY_HTTP_TLS_KEY: \"/secrets/portus.key\"",
],
volumes: [
'"{{ service_name }}-registry-data:/var/lib/registry"',
'"./secrets:/secrets:ro"',
'"./registry/init:/etc/docker/registry/init:ro"',
'"./registry/config.yml:/etc/docker/registry/config.yml:ro"',
],
networks: [
'"front-tier"'
],
ports: "{{ docker_registry_ports | default([]) }}",
}
]
}

@ -1,171 +0,0 @@
---
### tags:
### update_deployment
- name: "Send mattermost message"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-start.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages
- name: Gather current server infos
hcloud_server_info:
api_token: "{{ hetzner_authentication_token }}"
register: hetzner_server_infos
delegate_to: 127.0.0.1
become: false
- name: Save current server infos as variable (fact)
set_fact:
hetzner_server_infos_json: "{{ hetzner_server_infos.hcloud_server_info }}"
delegate_to: 127.0.0.1
become: false
- name: Read ip for {{ inventory_hostname }}
set_fact:
stage_server_ip: "{{ item.ipv4_address }}"
when: item.name == inventory_hostname
with_items: "{{ hetzner_server_infos_json }}"
delegate_to: 127.0.0.1
become: false
- name: "Setup DNS configuration for {{ service_name }}"
include_role:
name: _digitalocean
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}"
- name: "Setup public DNS configuration for {{ service_name }}"
include_role:
name: _digitalocean
tasks_from: domain
vars:
record_data: "{{ item.ip }}"
record_name: "{{ item.name }}"
loop: "{{ docker_registry_public_dns_entries }}"
when: docker_registry_public_dns_entries is defined
- name: "Check docker networks"
include_role:
name: _docker
tasks_from: networks
- name: "Check if {{ service_name }}/docker-compose.yml exists"
stat:
path: '{{ service_base_path }}/{{ service_name }}/docker-compose.yml'
register: check_docker_compose_file
tags:
- update_deployment
- name: "Stop {{ service_name }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:
- update_deployment
- name: "Deploy service configuration for {{ service_name }}"
include_role:
name: _deploy
tasks_from: configs
vars:
current_config: "docker-registry"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
current_docker: "{{ docker_registry_docker }}"
- name: "Update {{ service_name }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
tags:
- update_deployment
- name: "Start {{ service_name }}"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
tags:
- update_deployment
- name: "Update landing page entries for {{ service_name }}"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "{{ service_name }}",
current_url: "{{ http_s }}://{{ service_url }}",
current_version: "{{ docker_registry_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
management: "{{ http_s }}://{{ service_url }}:{{ monitor_port_service }}/management",
},
]
tags:
- update_deployment
- name: "Update landing page with public entries {{ service_name }}"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "{{ item.name }}",
current_url: "{{ http_s }}://{{ item.name }}.{{ domain }}",
current_version: "{{ docker_registry_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
management: "{{ http_s }}://{{ service_url }}:{{ monitor_port_service }}/management",
},
]
loop: "{{ docker_registry_public_dns_entries }}"
when: docker_registry_public_dns_entries is defined
tags:
- update_deployment
- name: "Update landing page with extra entries for {{ service_name }}"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "{{ item.name }}",
current_url: "{{ item.domain }}",
current_version: "{{ docker_registry_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
management: "{{ http_s }}://{{ service_url }}:{{ monitor_port_service }}/management",
},
]
loop: "{{ docker_registry_extra_domain_entries }}"
when: docker_registry_extra_domain_entries is defined
tags:
- update_deployment
- name: "Send mattermost messsge"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-end.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages

@ -0,0 +1,30 @@
---
logstash_hostname: "logstash-dev-elastic-stack-01"
filebeat_image_name: "docker.elastic.co/beats/filebeat"
filebeat_image_version: "7.12.0"
filebeat_id: "{{ service_name }}-filebeat"
filebeat_docker: {
services: [
{
name: "{{ filebeat_id }}",
image_name: "{{ filebeat_image_name }}",
image_version: "{{ filebeat_image_version }}",
user: root,
environment: [
"node.name: \"{{ filebeat_id }}\"",
],
volumes: [
'"./config/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro"',
'"/var/lib/docker/containers/:/var/lib/docker/containers/:ro"',
'"/var/run/docker.sock:/var/run/docker.sock:ro"',
'"/var/log/:/var/log/:ro"',
'"./certs:/usr/share/filebeat/config/certificates:ro"',
],
extra_hosts: "{{ filebeat_extra_hosts | default([]) }}",
},
],
}

@ -0,0 +1,63 @@
---
- name: "Send mattermost message"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-start.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages
- name: "Check if {{ role_name }}/docker-compose.yml exists"
stat:
path: '{{ service_base_path }}/{{ role_name }}/docker-compose.yml'
register: check_docker_compose_file
- name: "Stop {{ role_name }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ role_name }}'
when: check_docker_compose_file.stat.exists
ignore_errors: yes
- name: "Deploy service configuration for {{ role_name }}"
include_role:
name: _deploy
tasks_from: configs
vars:
current_config: "filebeat"
current_base_path: "{{ service_base_path }}"
current_destination: "filebeat"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
current_docker: "{{ filebeat_docker }}"
- name: "Update {{ role_name }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ role_name }}'
tags:
- update_deployment
- name: "Start {{ role_name }}"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ role_name }}'
- name: "Send mattermost message"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-end.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages

@ -0,0 +1,68 @@
---
### tags:
- name: Get all Firewalls from Hetzner
uri:
url: "https://api.hetzner.cloud/v1/firewalls"
headers:
accept: application/json
authorization: Bearer {{ hetzner_authentication_token }}
return_content: yes
register: hetzner_firewalls_response
delegate_to: 127.0.0.1
run_once: true
- name: Save firewall entries as variable (fact)
set_fact:
hetzner_firewalls_response_json: "{{ hetzner_firewalls_response.json }}"
run_once: true
- name: Parse firewall entries
set_fact:
firewall_records: "{{ hetzner_firewalls_response_json.firewalls | json_query(jmesquery) }}"
vars:
jmesquery: '[*].{id: id, name: name}'
run_once: true
- name: Print firewall entries
debug:
msg: "{{ firewall_records }}"
run_once: true
- name: Read firewall entry for {{ current_firewall_name }}
set_fact:
firewall_record: "{{ firewall_records | selectattr('name', 'equalto', current_firewall_name) | list | first | default({'name': '-', 'id': '-'}) }}"
- name: Print firewall entry for {{ current_firewall_name }}
debug:
msg: "{{ firewall_record }}"
- name: Save firewall entry {{ current_firewall_name }}
uri:
method: POST
url: "https://api.hetzner.cloud/v1/firewalls"
body_format: json
body: "{{ lookup('template','firewall-{{ current_firewall_name }}.json.j2') }}"
headers:
accept: application/json
authorization: Bearer {{ hetzner_authentication_token }}
return_content: yes
status_code: 201
when: firewall_records | selectattr("name", "equalto", current_firewall_name) | list | length == 0
delegate_to: 127.0.0.1
# TODO port changes are not written correctly
- name: Update firewall entry {{ current_firewall_name }}
uri:
method: PUT
url: "https://api.hetzner.cloud/v1/firewalls/{{ firewall_record.id }}"
body_format: json
body: "{{ lookup('template','firewall-{{ current_firewall_name }}.json.j2') }}"
headers:
accept: application/json
authorization: Bearer {{ hetzner_authentication_token }}
return_content: yes
status_code: 200
when: firewall_records | selectattr("name", "equalto", current_firewall_name) | list | length == 1
delegate_to: 127.0.0.1

@ -2,57 +2,16 @@
### tags: ### tags:
- name: Get all Firewalls from Hetzner - name: "Setup firewall"
uri: include_tasks: configure-firewall.yml
url: "https://api.hetzner.cloud/v1/firewalls"
headers:
accept: application/json
authorization: Bearer {{ hetzner_authentication_token }}
return_content: yes
register: hetzner_firewalls_response
delegate_to: 127.0.0.1
- name: Save firewall entries as variable (fact)
set_fact:
hetzner_firewalls_response_json: "{{ hetzner_firewalls_response.json }}"
- name: Parse firewall entry for default
set_fact:
firewall_record: "{{ hetzner_firewalls_response_json.firewalls | json_query(jmesquery) | first | default({'name': '-', 'id': '-'}) }}"
vars: vars:
jmesquery: '[*].{id: id, name: name}' current_firewall_name: '{{ current_firewall }}'
with_items:
- name: Print firewall entry for default - 'default'
debug: - 'kibana'
msg: "{{ firewall_record }}" - 'monitoring'
loop_control:
- name: Save firewall entry default loop_var: current_firewall
uri:
method: POST
url: "https://api.hetzner.cloud/v1/firewalls"
body_format: json
body: "{{ lookup('template','firewall-default.json.j2') }}"
headers:
accept: application/json
authorization: Bearer {{ hetzner_authentication_token }}
return_content: yes
status_code: 201
when: firewall_record.id == '-'
delegate_to: 127.0.0.1
- name: Update firewall entry default
uri:
method: PUT
url: "https://api.hetzner.cloud/v1/firewalls/{{ firewall_record.id }}"
body_format: json
body: "{{ lookup('template','firewall-default.json.j2') }}"
headers:
accept: application/json
authorization: Bearer {{ hetzner_authentication_token }}
return_content: yes
status_code: 200
when: firewall_record.id != '-'
delegate_to: 127.0.0.1
- name: Create new server {{ inventory_hostname }} - name: Create new server {{ inventory_hostname }}
hetzner.hcloud.hcloud_server: hetzner.hcloud.hcloud_server:
@ -65,17 +24,6 @@
state: present state: present
delegate_to: 127.0.0.1 delegate_to: 127.0.0.1
- name: Gather current server infos
hcloud_server_info:
api_token: "{{ hetzner_authentication_token }}"
register: hetzner_server_infos
delegate_to: 127.0.0.1
- name: Save current server infos as variable (fact)
set_fact:
hetzner_server_infos_json: "{{ hetzner_server_infos.hcloud_server_info }}"
delegate_to: 127.0.0.1
- name: Read ip for {{ inventory_hostname }} - name: Read ip for {{ inventory_hostname }}
set_fact: set_fact:
stage_server_ip: "{{ item.ipv4_address }}" stage_server_ip: "{{ item.ipv4_address }}"

@ -19,8 +19,10 @@
"protocol": "tcp", "protocol": "tcp",
"port": "22", "port": "22",
"source_ips": [ "source_ips": [
"149.233.6.129/32",
"212.121.131.106/32", "212.121.131.106/32",
"5.9.148.23/32" "5.9.148.23/32",
"87.150.34.206/32"
], ],
"destination_ips": [ "destination_ips": [
] ]
@ -30,8 +32,10 @@
"protocol": "tcp", "protocol": "tcp",
"port": "80", "port": "80",
"source_ips": [ "source_ips": [
"0.0.0.0/0", "149.233.6.129/32",
"::/0" "212.121.131.106/32",
"5.9.148.23/32",
"87.150.34.206/32"
], ],
"destination_ips": [ "destination_ips": [
] ]
@ -41,19 +45,10 @@
"protocol": "tcp", "protocol": "tcp",
"port": "443", "port": "443",
"source_ips": [ "source_ips": [
"0.0.0.0/0", "149.233.6.129/32",
"::/0"
],
"destination_ips": [
]
},
{
"direction": "in",
"protocol": "tcp",
"port": "9080-9085",
"source_ips": [
"212.121.131.106/32", "212.121.131.106/32",
"5.9.148.23/32" "5.9.148.23/32",
"87.150.34.206/32"
], ],
"destination_ips": [ "destination_ips": [
] ]

@ -0,0 +1,22 @@
{
"name": "kibana",
"labels": {
},
"rules": [
{
"direction": "in",
"protocol": "tcp",
"port": "5601",
"source_ips": [
"149.233.6.129/32",
"212.121.131.106/32",
"5.9.148.23/32",
"87.150.34.206/32"
],
"destination_ips": [
]
}
],
"applied_to": [
]
}

@ -0,0 +1,21 @@
{
"name": "monitoring",
"labels": {
},
"rules": [
{
"direction": "in",
"protocol": "tcp",
"port": "9080-9085",
"source_ips": [
"212.121.131.106/32",
"87.150.34.206/32",
"94.130.97.253/32"
],
"destination_ips": [
]
}
],
"applied_to": [
]
}

@ -0,0 +1,104 @@
---
# TODO doesn't bind to local port (currently used by setup keycloak with ansible)
service_port_keycloak_external: "8110"
keycloak_version: "12.0.4"
keycloak_admin_username: "keycloak-admin"
keycloak_admin_password: "keycloak-admin"
keycloak_postgres_version: "12"
keycloak_postgres_database: "keycloak-postgres"
keycloak_postgres_admin_username: "keycloak-postgres-admin"
keycloak_postgres_admin_password: "keycloak-postgres-admin"
keycloak_id: "{{ service_name }}-keycloak"
keycloak_postgres_id: "{{ service_name }}-postgres-keycloak"
keycloak_labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ keycloak_id }}.service={{ keycloak_id }}"',
'"traefik.http.routers.{{ keycloak_id }}.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ keycloak_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ keycloak_id }}.tls=true"',
'"traefik.http.routers.{{ keycloak_id }}.tls.certresolver=letsencrypt"',
'"traefik.http.services.{{ keycloak_id }}.loadbalancer.server.port={{ service_port }}"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.service={{ service_name }}-node-exporter"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.entrypoints=admin-system"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.tls=true"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.tls.certresolver=letsencrypt"',
]
keycloak_docker: {
networks: [
{
name: back-tier,
external: true,
},
{
name: front-tier,
external: true,
},
],
volumes: [
{
name: "{{ keycloak_postgres_id }}-data"
}
],
services: [
{
name: "{{ keycloak_id }}",
image_name: "jboss/keycloak",
image_version: "{{ keycloak_version }}",
labels: "{{ keycloak_labels + ( keycloak_labels_additional | default([])) }}",
environment: [
"PROXY_ADDRESS_FORWARDING: \"true\"",
"KEYCLOAK_USER: \"{{ keycloak_admin_username }}\"",
"KEYCLOAK_PASSWORD: \"{{ keycloak_admin_password }}\"",
"DB_VENDOR: postgres",
"DB_DATABASE: \"{{ keycloak_postgres_database }}\"",
"DB_USER: \"{{ keycloak_postgres_admin_username }}\"",
"DB_PASSWORD: \"{{ keycloak_postgres_admin_password }}\"",
"DB_ADDR: \"{{ keycloak_postgres_id }}\"",
"JAVA_OPTS_APPEND: \"-Dkeycloak.profile.feature.docker=enabled\"",
],
volumes: [
'"./eden-theme:/opt/jboss/keycloak/themes/eden-theme:ro"',
'"./smardigo-theme:/opt/jboss/keycloak/themes/smardigo-theme:ro"',
],
networks: [
'"back-tier"',
'"front-tier"',
],
ports: [
{
external: "{{ service_port_keycloak_external }}",
internal: "{{ service_port_keycloak }}",
},
],
extra_hosts: "{{ connect_extra_hosts | default([]) }}",
},
{
name: "{{ keycloak_postgres_id }}",
image_name: "postgres",
image_version: "{{ keycloak_postgres_version }}",
environment: [
'POSTGRES_DB: "{{ keycloak_postgres_database }}"',
'POSTGRES_USER: "{{ keycloak_postgres_admin_username }}"',
'POSTGRES_PASSWORD: "{{ keycloak_postgres_admin_password }}"',
],
volumes: [
'"{{ keycloak_postgres_id }}-data:/var/lib/postgresql/data"',
],
networks: [
'"back-tier"',
],
ports: "{{ keycloak_postgres_ports | default([]) }}",
},
],
}

@ -0,0 +1,21 @@
---
# Creates one client inside an existing Keycloak realm via the admin REST API.
# Expects realm_name, client_id, access_token and realm_client_ids (gathered by
# the caller, configure_realm.yml) to be in scope; skipped when a client with
# the same clientId already exists.
#- name: Print client {{ client_id }} for realm {{ realm_name }}
#  debug:
#    msg: "{{ lookup('template','keycloak-realm-create-client.json.j2') }}"
#  when: realm_client_ids | selectattr('clientId', 'equalto', client_id) | list | length == 0
#  tags:
#    - update_realms
- name: Create client {{ client_id }} for realm {{ realm_name }}
  uri:
    url: http://localhost:{{ service_port_keycloak_external }}/auth/admin/realms/{{ realm_name }}/clients
    method: POST
    body_format: json
    body: "{{ lookup('template','keycloak-realm-create-client.json.j2') }}"
    headers:
      # Fixed: was "Bearer {{ access_token}} " — trailing space inside the
      # header value and missing space before the closing braces.
      Authorization: "Bearer {{ access_token }}"
    status_code: [201]
  when: realm_client_ids | selectattr('clientId', 'equalto', client_id) | list | length == 0
  tags:
    - update_realms

@ -0,0 +1,119 @@
---
# Fetch all realms from the admin API; result feeds realm_ids below so that
# existing realms are not re-created.
- name: Read realms
  uri:
    url: http://localhost:{{ service_port_keycloak_external }}/auth/admin/realms
    method: GET
    headers:
      # Fixed: was "Bearer {{ access_token}} " (trailing space in the value).
      Authorization: "Bearer {{ access_token }}"
    status_code: [200]
  register: realms
  tags:
    - update_realms
#- name: Print realms
# debug:
# msg: "{{ realms }}"
# tags:
# - update_realms
- name: Save realms as variable (fact)
set_fact:
realms_json: "{{ realms.json }}"
tags:
- update_realms
- name: Read realm ids
set_fact:
realm_ids: "{{ realms_json | json_query(jmesquery) }}"
vars:
jmesquery: '[*].id'
tags:
- update_realms
# Create the realm only when its id is not among the already-existing realms.
- name: Create realm {{ current_realm_name }}
  uri:
    url: http://localhost:{{ service_port_keycloak_external }}/auth/admin/realms
    method: POST
    body_format: json
    body: "{{ lookup('template','keycloak-realm-create.json.j2') }}"
    headers:
      # Fixed: was "Bearer {{ access_token}} " (trailing space in the value).
      Authorization: "Bearer {{ access_token }}"
    status_code: [201]
  when: current_realm_name not in realm_ids
  tags:
    - update_realms
# List the realm's clients; used below to skip clients that already exist.
- name: Read clients from realm {{ current_realm_name }}
  uri:
    url: http://localhost:{{ service_port_keycloak_external }}/auth/admin/realms/{{ current_realm_name }}/clients
    method: GET
    headers:
      # Fixed: was "Bearer {{ access_token}} " (trailing space in the value).
      Authorization: "Bearer {{ access_token }}"
    status_code: [200]
  register: realm_clients
  tags:
    - update_realms
#- name: Print clients from realm {{ current_realm_name }}
# debug:
# msg: "{{ realm_clients }}"
# tags:
# - update_realms
- name: Save clients from realm as variable (fact)
set_fact:
realm_clients_json: "{{ realm_clients.json }}"
tags:
- update_realms
- name: Save client ids from realm {{ current_realm_name }}
set_fact:
realm_client_ids: "{{ realm_clients_json | json_query(jmesquery) }}"
vars:
jmesquery: '[*].{id: id, clientId: clientId}'
tags:
- update_realms
- name: Print client ids
debug:
msg: "{{ realm_client_ids }}"
tags:
- update_realms
- name: Create clients from realm {{ current_realm_name }}
include_tasks: configure_client.yml
vars:
realm_name: '{{ current_realm_name }}'
client_id: '{{ client.clientId }}'
client_name: '{{ client.name }}'
admin_url: '{{ client.admin_url }}'
root_url: '{{ client.root_url }}'
redirect_uris: '{{ client.redirect_uris }}'
secret: '{{ client.secret }}'
web_origins: '{{ client.web_origins }}'
access_token: '{{ keycloak_authentication.json.access_token }}'
with_items: "{{ current_realm_clients }}"
loop_control:
loop_var: client
tags:
- update_realms
- name: Create realm {{ current_realm_name }} LDAP user storage provider
include_tasks: configure_user_storage_provider_ldap.yml
vars:
realm: '{{ current_realm_name }}'
provider_name: '{{ provider.name }}'
usersDn: '{{ provider.usersDn }}'
ldap_username: '{{ provider.username }}'
ldap_password: '{{ provider.password }}'
ldap_connection_url: '{{ provider.connection_url }}'
ldap_username_attribute: '{{ provider.username_attribute }}'
custom_user_search_filter: '{{ provider.custom_user_search_filter }}'
search_scope: '{{ provider.search_scope }}'
access_token: '{{ keycloak_authentication.json.access_token }}'
with_items: "{{ current_realm_ldaps }}"
loop_control:
loop_var: provider
tags:
- update_realms

@ -0,0 +1,107 @@
- name: Create ldap user storage provider in realm {{ realm }}
uri:
url: http://localhost:{{ service_port_keycloak_external }}/auth/admin/realms/{{ realm }}/components
method: POST
body_format: json
body: '{
"name": "{{ provider_name }}",
"providerId": "ldap",
"providerType": "org.keycloak.storage.UserStorageProvider",
"parentId": "{{ realm }}",
"config": {
"allowKerberosAuthentication": ["false"],
"authType": ["simple"],
"batchSizeForSync": ["1000"],
"bindCredential": ["{{ ldap_password }}"],
"bindDn": ["{{ ldap_username }}"],
"cachePolicy": ["DEFAULT"],
"changedSyncPeriod": ["86400"],
"connectionPooling": ["true"],
"connectionUrl": ["{{ ldap_connection_url }}"],
"customUserSearchFilter": ["{{ custom_user_search_filter }}"],
"debug": ["false"],
"editMode": ["READ_ONLY"],
"enabled": ["true"],
"fullSyncPeriod": ["604800"],
"importEnabled": ["true"],
"pagination": ["true"],
"priority": ["0"],
"rdnLDAPAttribute": ["cn"],
"searchScope": ["{{ search_scope }}"],
"syncRegistrations": ["false"],
"trustEmail": ["false"],
"useKerberosForPasswordAuthentication": ["false"],
"usernameLDAPAttribute": ["{{ ldap_username_attribute }}"],
"userObjectClasses": ["person, organizationalPerson, user"],
"usersDn": ["{{ usersDn }}"],
"useTruststoreSpi": ["ldapsOnly"],
"uuidLDAPAttribute": ["objectGUID"],
"validatePasswordPolicy": ["false"],
"vendor": ["ad"]
}
}'
status_code: [201]
headers:
Authorization: "Bearer {{ access_token }}"
register: response
tags:
- update_realms
- name: Get id of created user storage provider
uri:
url: "{{ response.location }}"
method: GET
headers:
Authorization: "Bearer {{ access_token }}"
register: response
tags:
- update_realms
- name: Create user attribute mapper for firstName
uri:
url: http://localhost:{{ service_port_keycloak_external }}/auth/admin/realms/{{ realm }}/components
method: POST
body_format: json
body: '{
"name": "first name",
"providerId": "user-attribute-ldap-mapper",
"providerType": "org.keycloak.storage.ldap.mappers.LDAPStorageMapper",
"parentId": "{{ response.json.id }}",
"config": {
"ldap.attribute": ["givenName"],
"is.mandatory.in.ldap": ["false"],
"is.binary.attribute": ["false"],
"read.only": ["true"],
"always.read.value.from.ldap": ["false"],
"user.model.attribute": ["firstName"]
}
}'
headers:
Authorization: "Bearer {{ access_token }}"
status_code: [201]
tags:
- update_realms
# Attach a hardcoded-ldap-role-mapper for every entry in hardcoded_user_roles
# to the user storage provider created above (response.json.id).
- name: Create user role mappers
  uri:
    url: http://localhost:{{ service_port_keycloak_external }}/auth/admin/realms/{{ realm }}/components
    method: POST
    body_format: json
    # Fixed: the original payload had trailing commas after the "config"
    # object's single entry and after "parentId" — invalid strict JSON.
    body: '{
      "name": "{{ role.name }}",
      "providerId": "hardcoded-ldap-role-mapper",
      "providerType": "org.keycloak.storage.ldap.mappers.LDAPStorageMapper",
      "config": {
        "role": ["{{ role.role_id }}"]
      },
      "parentId": "{{ response.json.id }}"
    }'
    headers:
      Authorization: "Bearer {{ access_token }}"
    status_code: [201]
  when: hardcoded_user_roles is defined
  with_items: "{{ hardcoded_user_roles }}"
  loop_control:
    loop_var: role
  tags:
    - update_realms

@ -0,0 +1,61 @@
---
# Fetch existing users of the realm; feeds realm_user_usernames so users are
# only created when missing.
- name: Read users of realm {{ current_realm_name }}
  uri:
    url: http://localhost:{{ service_port_keycloak_external }}/auth/admin/realms/{{ current_realm_name }}/users
    method: GET
    headers:
      # Fixed: was "Bearer {{ access_token}} " (trailing space in the value).
      Authorization: "Bearer {{ access_token }}"
    status_code: [200]
  register: realm_users
  tags:
    - create_users
    - update_realms
#- name: Print realm users
# debug:
# msg: "{{ realm_users }}"
# tags:
# - create_users
# - update_realms
- name: Save realm users as variable (fact)
set_fact:
realm_users_json: "{{ realm_users.json }}"
tags:
- create_users
- update_realms
- name: Read realm user ids
set_fact:
realm_user_usernames: "{{ realm_users_json | json_query(jmesquery) }}"
vars:
jmesquery: '[*].username'
tags:
- create_users
- update_realms
#- name: Print realm usernames
# debug:
# msg: "{{ realm_user_usernames }}"
# tags:
# - create_users
# - update_realms
- name: "Create users for realm {{ current_realm_name }}"
uri:
url: http://localhost:{{ service_port_keycloak_external }}/auth/admin/realms/{{ current_realm_name }}/users
method: POST
body_format: json
body: "{{ lookup('template','keycloak-realm-create-user.json.j2') }}"
headers:
Content-Type: "application/json"
Authorization: "Bearer {{ access_token }}"
status_code: [201]
with_items: "{{ current_realm_users }}"
when: current_realm_user.username not in realm_user_usernames
loop_control:
loop_var: current_realm_user
tags:
- create_users
- update_realms

@ -0,0 +1,160 @@
---
### tags:
### create_users
### update_realms
### update_deployment
- name: "Send mattermost messsge"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-start.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages
- name: "Setup DNS configuration for {{ service_name }}"
include_role:
name: _digitalocean
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}"
- name: "Check if {{ service_name }}/docker-compose.yml exists"
stat:
path: '{{ service_base_path }}/{{ service_name }}/docker-compose.yml'
register: check_docker_compose_file
tags:
- update_deployment
- name: "Stop {{ service_name }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:
- update_deployment
- name: "Deploy service configuration for {{ service_name }}"
include_role:
name: _deploy
tasks_from: configs
vars:
current_config: "keycloak"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
current_docker: "{{ keycloak_docker }}"
- name: "Update {{ service_name }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
tags:
- update_deployment
- name: "Start {{ service_name }}"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
tags:
- update_deployment
- name: "Update landing page for {{ service_name }}"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "{{ service_name }}",
current_url: "{{ http_s }}://{{ keycloak_id }}.{{ domain }}",
current_version: "{{ keycloak_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
},
]
tags:
- update_deployment
- name: "Wait for {{ service_port_keycloak_external }}"
wait_for:
port: '{{ service_port_keycloak_external }}'
delay: 60
# Obtain an admin access token from the master realm's token endpoint; the
# registered result is consumed as keycloak_authentication.json.access_token
# by the realm/user tasks below.
- name: "Authenticate with Keycloak server"
  uri:
    url: "http://localhost:{{ service_port_keycloak_external }}/auth/realms/master/protocol/openid-connect/token"
    method: POST
    body_format: form-urlencoded
    # Pass a mapping instead of a hand-concatenated string so the uri module
    # url-encodes each value — special characters in the admin password no
    # longer corrupt the request body.
    body:
      username: "{{ keycloak_admin_username }}"
      password: "{{ keycloak_admin_password }}"
      client_id: admin-cli
      grant_type: password
  register: keycloak_authentication
  # Fixed: Ansible ignores 'retries' unless an 'until' condition is present —
  # keep retrying until the token endpoint actually answers HTTP 200 (covers
  # the window where the container is up but Keycloak is still booting).
  until: keycloak_authentication.status == 200
  retries: 5
  delay: 5
  tags:
    - create_users
    - update_realms
- name: "Create user storage provider in master realm"
include_tasks: configure_user_storage_provider_ldap.yml
vars:
access_token: "{{ keycloak_authentication.json.access_token }}"
realm: master
provider_name: '{{ item.name }}'
ldap_username: '{{ item.username }}'
ldap_password: '{{ item.password }}'
ldap_connection_url: '{{ item.connection_url }}'
ldap_username_attribute: '{{ item.username_attribute }}'
usersDn: '{{ item.usersDn }}'
custom_user_search_filter: '{{ item.custom_user_search_filter }}'
search_scope: '{{ item.search_scope }}'
hardcoded_user_roles: '{{ item.hardcoded_user_roles }}'
with_items: "{{ keycloak.master.ldap | default([]) }}"
when: keycloak.master is defined
tags:
- update_realms
- name: "Setup realms"
include_tasks: configure_realm.yml
vars:
current_realm_name: '{{ current_realm.name }}'
current_realm_display_name: '{{ current_realm.display_name }}'
current_realm_clients: '{{ current_realm.clients | default([]) }}'
current_realm_ldaps: '{{ current_realm.ldaps | default([]) }}'
access_token: "{{ keycloak_authentication.json.access_token }}"
with_items: "{{ keycloak.realms }}"
loop_control:
loop_var: current_realm
tags:
- update_realms
- name: "Create realm users"
include_tasks: create_realm_users.yml
vars:
current_realm_name: "{{ item.name }}"
current_realm_users: "{{ item.users | default([]) }}"
access_token: "{{ keycloak_authentication.json.access_token }}"
with_items: "{{ keycloak.realms }}"
tags:
- create_users
- update_realms
- name: "Send mattermost messsge"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-end.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages

@ -0,0 +1,63 @@
{
  "adminUrl": "{{ admin_url }}",
  "attributes": {
    "saml.assertion.signature": "false",
    "saml.force.post.binding": "false",
    "saml.multivalued.roles": "false",
    "saml.encrypt": "false",
    "saml.server.signature": "false",
    "saml.server.signature.keyinfo.ext": "false",
    "exclude.session.state.from.auth.response": "false",
    "saml_force_name_id_format": "false",
    "saml.client.signature": "false",
    "tls.client.certificate.bound.access.tokens": "false",
    "saml.authnstatement": "false",
    "display.on.consent.screen": "false",
    "saml.onetimeuse.condition": "false"
  },
  "authenticationFlowBindingOverrides": {},
  "authorizationServicesEnabled": true,
  "bearerOnly": false,
  "clientAuthenticatorType": "client-secret",
  "clientId": "{{ client_id }}",
  "consentRequired": false,
  "defaultClientScopes": [
    "role_list",
    "profile",
    "roles",
    "email"
  ],
  "directAccessGrantsEnabled": true,
  "enabled": true,
  "frontchannelLogout": false,
  "fullScopeAllowed": true,
  "implicitFlowEnabled": false,
  "name": "{{ client_name }}",
  "nodeReRegistrationTimeout": -1,
  "notBefore": 0,
  "optionalClientScopes": [],
  "protocol": "{{ protocol | default('openid-connect') }}",
  "protocolMappers": [
    {
      "name": "username",
      "protocol": "openid-connect",
      "protocolMapper": "oidc-usermodel-property-mapper",
      "consentRequired": false,
      "config": {
        "user.attribute": "username",
        "claim.name": "sub",
        "id.token.claim": "true",
        "access.token.claim": "true",
        "userinfo.token.claim": "true"
      }
    }
  ],
  "publicClient": false,
  "redirectUris": {{ redirect_uris | to_json }},
  "rootUrl": "{{ root_url }}",
  "secret": "{{ secret }}",
  "serviceAccountsEnabled": true,
  "standardFlowEnabled": true,
  "surrogateAuthRequired": false,
  "webOrigins": {{ web_origins | to_json }}
}

@ -0,0 +1,12 @@
{
  "username": {{ current_realm_user.username | to_json }},
  "firstName": {{ current_realm_user.firstName | default('') | to_json }},
  "lastName": {{ current_realm_user.lastName | default('') | to_json }},
  "email": {{ current_realm_user.email | default('') | to_json }},
  "enabled": true,
  "credentials" : [{
    "type": "password",
    "value": {{ current_realm_user.password | to_json }},
    "temporary": false
  }]
}

@ -0,0 +1,133 @@
{
"id": "{{ current_realm_name }}",
"realm": "{{ current_realm_name }}",
"displayName": "{{ current_realm_display_name }}",
"displayNameHtml": "",
"notBefore": 0,
"revokeRefreshToken": false,
"refreshTokenMaxReuse": 0,
"accessTokenLifespan": 60,
"accessTokenLifespanForImplicitFlow": 900,
"ssoSessionIdleTimeout": 1800,
"ssoSessionMaxLifespan": 36000,
"ssoSessionIdleTimeoutRememberMe": 0,
"ssoSessionMaxLifespanRememberMe": 0,
"offlineSessionIdleTimeout": 2592000,
"offlineSessionMaxLifespanEnabled": false,
"offlineSessionMaxLifespan": 5184000,
"clientSessionIdleTimeout": 0,
"clientSessionMaxLifespan": 0,
"clientOfflineSessionIdleTimeout": 0,
"clientOfflineSessionMaxLifespan": 0,
"accessCodeLifespan": 60,
"accessCodeLifespanUserAction": 300,
"accessCodeLifespanLogin": 1800,
"actionTokenGeneratedByAdminLifespan": 43200,
"actionTokenGeneratedByUserLifespan": 300,
"enabled": true,
"sslRequired": "none",
"registrationAllowed": false,
"registrationEmailAsUsername": false,
"rememberMe": false,
"verifyEmail": false,
"loginWithEmailAllowed": false,
"duplicateEmailsAllowed": false,
"resetPasswordAllowed": false,
"editUsernameAllowed": false,
"bruteForceProtected": false,
"permanentLockout": false,
"maxFailureWaitSeconds": 900,
"minimumQuickLoginWaitSeconds": 60,
"waitIncrementSeconds": 60,
"quickLoginCheckMilliSeconds": 1000,
"maxDeltaTimeSeconds": 43200,
"failureFactor": 30,
"defaultRoles": [
"uma_authorization",
"offline_access"
],
"requiredCredentials": [
"password"
],
"otpPolicyType": "totp",
"otpPolicyAlgorithm": "HmacSHA1",
"otpPolicyInitialCounter": 0,
"otpPolicyDigits": 6,
"otpPolicyLookAheadWindow": 1,
"otpPolicyPeriod": 30,
"otpSupportedApplications": [
"FreeOTP",
"Google Authenticator"
],
"webAuthnPolicyRpEntityName": "keycloak",
"webAuthnPolicySignatureAlgorithms": [
"ES256"
],
"webAuthnPolicyRpId": "",
"webAuthnPolicyAttestationConveyancePreference": "not specified",
"webAuthnPolicyAuthenticatorAttachment": "not specified",
"webAuthnPolicyRequireResidentKey": "not specified",
"webAuthnPolicyUserVerificationRequirement": "not specified",
"webAuthnPolicyCreateTimeout": 0,
"webAuthnPolicyAvoidSameAuthenticatorRegister": false,
"webAuthnPolicyAcceptableAaguids": [
],
"webAuthnPolicyPasswordlessRpEntityName": "keycloak",
"webAuthnPolicyPasswordlessSignatureAlgorithms": [
"ES256"
],
"webAuthnPolicyPasswordlessRpId": "",
"webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified",
"webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified",
"webAuthnPolicyPasswordlessRequireResidentKey": "not specified",
"webAuthnPolicyPasswordlessUserVerificationRequirement": "not specified",
"webAuthnPolicyPasswordlessCreateTimeout": 0,
"webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false,
"webAuthnPolicyPasswordlessAcceptableAaguids": [
],
"browserSecurityHeaders": {
"contentSecurityPolicyReportOnly": "",
"xContentTypeOptions": "nosniff",
"xRobotsTag": "none",
"xFrameOptions": "SAMEORIGIN",
"contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';",
"xXSSProtection": "1; mode=block",
"strictTransportSecurity": "max-age=31536000; includeSubDomains"
},
"smtpServer": {
},
"loginTheme": "smardigo-theme",
"accountTheme": "smardigo-theme",
"adminTheme": "smardigo-theme",
"eventsEnabled": false,
"eventsListeners": [
"jboss-logging"
],
"enabledEventTypes": [
],
"adminEventsEnabled": false,
"adminEventsDetailsEnabled": false,
"identityProviders": [
],
"identityProviderMappers": [
],
"internationalizationEnabled": true,
"supportedLocales": [
"de",
"en"
],
"defaultLocale": "de",
"browserFlow": "browser",
"registrationFlow": "registration",
"directGrantFlow": "direct grant",
"resetCredentialsFlow": "reset credentials",
"clientAuthenticationFlow": "clients",
"dockerAuthenticationFlow": "docker auth",
"attributes": {
"clientOfflineSessionMaxLifespan": "0",
"clientSessionIdleTimeout": "0",
"clientSessionMaxLifespan": "0",
"clientOfflineSessionIdleTimeout": "0"
},
"userManagedAccessAllowed": false
}

@ -49,7 +49,7 @@ node_exporter_docker: {
labels: [ labels: [
'"traefik.enable=true"', '"traefik.enable=true"',
'"traefik.http.routers.{{ node_exporter_id }}.service={{ node_exporter_id }}"', '"traefik.http.routers.{{ node_exporter_id }}.service={{ node_exporter_id }}"',
'"traefik.http.routers.{{ node_exporter_id }}.rule=Host(`{{ node_exporter_id }}.{{ domain }}`)"', '"traefik.http.routers.{{ node_exporter_id }}.rule=Host(`{{ service_name }}.{{ domain }}`)"',
'"traefik.http.routers.{{ node_exporter_id }}.entrypoints=admin-system"', '"traefik.http.routers.{{ node_exporter_id }}.entrypoints=admin-system"',
'"traefik.http.routers.{{ node_exporter_id }}.tls=true"', '"traefik.http.routers.{{ node_exporter_id }}.tls=true"',
'"traefik.http.routers.{{ node_exporter_id }}.tls.certresolver=letsencrypt"', '"traefik.http.routers.{{ node_exporter_id }}.tls.certresolver=letsencrypt"',

@ -13,24 +13,19 @@
when: when:
- send_status_messages - send_status_messages
- name: "Check docker networks" - name: "Check if {{ role_name }}/docker-compose.yml exists"
include_role:
name: _docker
tasks_from: networks
- name: "Check if node-exporter/docker-compose.yml exists"
stat: stat:
path: '{{ service_base_path }}/node-exporter/docker-compose.yml' path: '{{ service_base_path }}/{{ role_name }}/docker-compose.yml'
register: check_docker_compose_file register: check_docker_compose_file
- name: "Stop node-exporter" - name: "Stop {{ role_name }}"
shell: docker-compose down shell: docker-compose down
args: args:
chdir: '{{ service_base_path }}/node-exporter' chdir: '{{ service_base_path }}/{{ role_name }}'
when: check_docker_compose_file.stat.exists when: check_docker_compose_file.stat.exists
ignore_errors: yes ignore_errors: yes
- name: "Deploy service configuration for node-exporter" - name: "Deploy service configuration for {{ role_name }}"
include_role: include_role:
name: _deploy name: _deploy
tasks_from: configs tasks_from: configs
@ -42,10 +37,17 @@
current_group: "{{ docker_group }}" current_group: "{{ docker_group }}"
current_docker: "{{ node_exporter_docker }}" current_docker: "{{ node_exporter_docker }}"
- name: "Start node-exporter" - name: "Update {{ role_name }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ role_name }}'
tags:
- update_deployment
- name: "Start {{ role_name }}"
shell: docker-compose up -d shell: docker-compose up -d
args: args:
chdir: '{{ service_base_path }}/node-exporter' chdir: '{{ service_base_path }}/{{ role_name }}'
- name: "Send mattermost messsge" - name: "Send mattermost messsge"
uri: uri:

@ -16,27 +16,6 @@
when: when:
- send_status_messages - send_status_messages
- name: Gather current server infos
hcloud_server_info:
api_token: "{{ hetzner_authentication_token }}"
register: hetzner_server_infos
delegate_to: 127.0.0.1
become: false
- name: Save current server infos as variable (fact)
set_fact:
hetzner_server_infos_json: "{{ hetzner_server_infos.hcloud_server_info }}"
delegate_to: 127.0.0.1
become: false
- name: Read ip for {{ inventory_hostname }}
set_fact:
stage_server_ip: "{{ item.ipv4_address }}"
when: item.name == inventory_hostname
with_items: "{{ hetzner_server_infos_json }}"
delegate_to: 127.0.0.1
become: false
- name: "Setup DNS configuration for {{ service_name }} prometheus" - name: "Setup DNS configuration for {{ service_name }} prometheus"
include_role: include_role:
name: _digitalocean name: _digitalocean
@ -61,11 +40,6 @@
record_data: "{{ stage_server_ip }}" record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}-alertmanager" record_name: "{{ service_name }}-alertmanager"
- name: "Check docker networks"
include_role:
name: _docker
tasks_from: networks
- name: "Check if {{ service_name }}/docker-compose.yml exists" - name: "Check if {{ service_name }}/docker-compose.yml exists"
stat: stat:
path: '{{ service_base_path }}/{{ service_name }}/docker-compose.yml' path: '{{ service_base_path }}/{{ service_name }}/docker-compose.yml'
@ -110,19 +84,19 @@
vars: vars:
current_services: [ current_services: [
{ {
current_name: "{{ service_prefix }}prometheus", current_name: "prometheus",
current_url: "{{ http_s}}://{{ service_name }}-prometheus.{{ domain }}", current_url: "{{ http_s}}://{{ service_name }}-prometheus.{{ domain }}",
current_version: "{{ prometheus_version }}", current_version: "{{ prometheus_version }}",
current_date: "{{ ansible_date_time.iso8601 }}", current_date: "{{ ansible_date_time.iso8601 }}",
}, },
{ {
current_name: "{{ service_prefix }}grafana", current_name: "grafana",
current_url: "{{ http_s }}://{{ service_name }}-grafana.{{ domain }}", current_url: "{{ http_s }}://{{ service_name }}-grafana.{{ domain }}",
current_version: "{{ grafana_version }}", current_version: "{{ grafana_version }}",
current_date: "{{ ansible_date_time.iso8601 }}", current_date: "{{ ansible_date_time.iso8601 }}",
}, },
{ {
current_name: "{{ service_prefix }}alertmanager", current_name: "alertmanager",
current_url: "{{ http_s }}://{{ service_name }}-alertmanager.{{ domain }}", current_url: "{{ http_s }}://{{ service_name }}-alertmanager.{{ domain }}",
current_version: "{{ alertmanager_version }}", current_version: "{{ alertmanager_version }}",
current_date: "{{ ansible_date_time.iso8601 }}", current_date: "{{ ansible_date_time.iso8601 }}",

@ -13,24 +13,19 @@
when: when:
- send_status_messages - send_status_messages
- name: "Check docker networks" - name: "Check if {{ role_name }}/docker-compose.yml exists"
include_role:
name: _docker
tasks_from: networks
- name: "Check if traefik/docker-compose.yml exists"
stat: stat:
path: '{{ service_base_path }}/traefik/docker-compose.yml' path: '{{ service_base_path }}/{{ role_name }}/docker-compose.yml'
register: check_docker_compose_file register: check_docker_compose_file
- name: "Stop traefik" - name: "Stop {{ role_name }}"
shell: docker-compose down shell: docker-compose down
args: args:
chdir: '{{ service_base_path }}/traefik' chdir: '{{ service_base_path }}/{{ role_name }}'
when: check_docker_compose_file.stat.exists when: check_docker_compose_file.stat.exists
ignore_errors: yes ignore_errors: yes
- name: "Deploy service configuration for traefik" - name: "Deploy service configuration for {{ role_name }}"
include_role: include_role:
name: _deploy name: _deploy
tasks_from: configs tasks_from: configs
@ -45,7 +40,7 @@
- name: "Ensure acme.json exists" - name: "Ensure acme.json exists"
copy: copy:
content: "" content: ""
dest: '{{ service_base_path }}/traefik/acme.json' dest: '{{ service_base_path }}/{{ role_name }}/acme.json'
force: no force: no
owner: "{{ docker_owner }}" owner: "{{ docker_owner }}"
group: "{{ docker_group }}" group: "{{ docker_group }}"
@ -58,24 +53,31 @@
vars: vars:
current_services: [] current_services: []
- name: "Update landing page for traefik" - name: "Update landing page for {{ role_name }}"
include_role: include_role:
name: _deploy name: _deploy
tasks_from: caddy_landing_page tasks_from: caddy_landing_page
vars: vars:
current_services: [ current_services: [
{ {
current_name: "traefik", current_name: "{{ role_name }}",
current_url: "{{ http_s }}://{{ stage_server_url_host }}:{{ admin_port_traefik }}", current_url: "{{ http_s }}://{{ stage_server_url_host }}:{{ admin_port_traefik }}",
current_version: "{{ traefik_image_version }}", current_version: "{{ traefik_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}", current_date: "{{ ansible_date_time.iso8601 }}",
}, },
] ]
- name: "Start traefik" - name: "Update {{ role_name }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ role_name }}'
tags:
- update_deployment
- name: "Start {{ role_name }}"
shell: docker-compose up -d shell: docker-compose up -d
args: args:
chdir: '{{ service_base_path }}/traefik' chdir: '{{ service_base_path }}/{{ role_name }}'
- name: "Send mattermost messsge" - name: "Send mattermost messsge"
uri: uri:

@ -29,6 +29,37 @@
tags: tags:
- install - install
- name: "Gather current server infos"
hcloud_server_info:
api_token: "{{ hetzner_authentication_token }}"
register: hetzner_server_infos
delegate_to: 127.0.0.1
become: false
- name: "Set current server infos as fact: hetzner_server_infos_json"
set_fact:
hetzner_server_infos_json: "{{ hetzner_server_infos.hcloud_server_info }}"
delegate_to: 127.0.0.1
become: false
- name: "Read ip address for {{ inventory_hostname }}"
set_fact:
stage_server_ip: "{{ item.ipv4_address }}"
when: item.name == inventory_hostname
with_items: "{{ hetzner_server_infos_json }}"
delegate_to: 127.0.0.1
become: false
- name: Print the gathered infos
debug:
var: stage_server_ip
delegate_to: 127.0.0.1
- name: "Check docker networks"
include_role:
name: _docker
tasks_from: networks
roles: roles:
- role: ansible-role-docker - role: ansible-role-docker
vars: vars:
@ -40,6 +71,11 @@
tags: tags:
- common - common
- role: filebeat
when: filebeat_enabled | default(True)
tags:
- filebeat
- role: node-exporter - role: node-exporter
when: node_exporter_enabled | default(True) when: node_exporter_enabled | default(True)
tags: tags:

@ -1,7 +1,7 @@
--- ---
- name: 'apply setup to {{ host | default("all") }}' - name: 'apply setup to {{ host | default("all") }}'
hosts: '{{ host | default("all") }}' hosts: '{{ host | default("all") }}'
serial: "{{ serial_number|default(1) }}" serial: "{{ serial_number | default(5) }}"
become: yes become: yes
pre_tasks: pre_tasks:
@ -12,8 +12,41 @@
- ansible_version.minor >= 10 - ansible_version.minor >= 10
msg: "The ansible version has to be at least ({{ ansible_version.full }})" msg: "The ansible version has to be at least ({{ ansible_version.full }})"
- name: "Gather current server infos"
hcloud_server_info:
api_token: "{{ hetzner_authentication_token }}"
register: hetzner_server_infos
delegate_to: 127.0.0.1
become: false
- name: "Set current server infos as fact: hetzner_server_infos_json"
set_fact:
hetzner_server_infos_json: "{{ hetzner_server_infos.hcloud_server_info }}"
delegate_to: 127.0.0.1
become: false
- name: "Read ip address for {{ inventory_hostname }}"
set_fact:
stage_server_ip: "{{ item.ipv4_address }}"
when: item.name == inventory_hostname
with_items: "{{ hetzner_server_infos_json }}"
delegate_to: 127.0.0.1
become: false
- name: Print the gathered infos
debug:
var: stage_server_ip
delegate_to: 127.0.0.1
- name: "Check docker networks"
include_role:
name: _docker
tasks_from: networks
roles: roles:
- role: connect - role: connect
when: "'connect' in group_names" when: "'connect' in group_names"
- role: keycloak
when: "'keycloak' in group_names"
- role: prometheus - role: prometheus
when: "'prometheus' in group_names" when: "'prometheus' in group_names"

@ -2,6 +2,7 @@
dev-connect-01 dev-connect-01
dev-connect-02 dev-connect-02
dev-connect-03 dev-connect-03
dev-connect-04
[docker_registry] [docker_registry]
dev-docker-registry-01 dev-docker-registry-01
@ -11,6 +12,9 @@ dev-elastic-stack-01
dev-elastic-stack-02 dev-elastic-stack-02
dev-elastic-stack-03 dev-elastic-stack-03
[keycloak]
dev-keycloak-01
[prometheus] [prometheus]
dev-prometheus-01 dev-prometheus-01
@ -18,6 +22,7 @@ dev-prometheus-01
connect connect
docker_registry docker_registry
elastic elastic
keycloak
prometheus prometheus
[all:children] [all:children]

@ -1,12 +0,0 @@
version: 0.1
storage:
filesystem:
rootdirectory: /var/lib/registry
delete:
enabled: true
http:
addr: 0.0.0.0:5000
debug:
addr: 0.0.0.0:5001

@ -1,7 +0,0 @@
#!/bin/sh
set -x
cp /secrets/portus.crt /usr/local/share/ca-certificates
update-ca-certificates
registry serve /etc/docker/registry/config.yml

@ -0,0 +1,20 @@
-----BEGIN CERTIFICATE-----
MIIDSjCCAjKgAwIBAgIVAO1gvUalebylIyFuIAZC6bfhz04QMA0GCSqGSIb3DQEB
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
ZXJhdGVkIENBMB4XDTIxMDQxODExMDkwOFoXDTIyMDQxODExMDkwOFowNDEyMDAG
A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCLcbwtcUwHBNBOlLoZA+lH
xMoOrrySQNRRyLw/hV+KpW1YncCgVq3dGEOjOC3lS1B55+sZfjEn7EKfDtrZN6Pf
0Ot22/GV3r+fJi72njBfay1Cep8OCJxNOx9i0N3XO2GN6IYPMEpkqFj8nySpAgh3
70hILu3QMov2I2rWXMzE3yV6Pi7OQ151Fa8vZ1HTXkpjO7Rxyt36cXLB7slj6Uxo
72cO0WphRV6e24Fx5iRLlAs7WdXDOSUXZfIFBiZGYvuZIgbAw9M9ZR5536eXBFuQ
MuwLiP5g+D5GZbal5enRUShBknRP9Xvnxv7OOnPhMXVHMTsM9feqxVzmhRPp4XBz
AgMBAAGjUzBRMB0GA1UdDgQWBBRJ5gyop7tp96EV6O/FHIY2P3T7pzAfBgNVHSME
GDAWgBRJ5gyop7tp96EV6O/FHIY2P3T7pzAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQBEgehvsAW5r1/nogmIhhRVl5rZcy9mnbxsy/9udU1zBTEe
ZhgCCqOx6xffXUWSvVXw3BUUizCvB5nSHCYBt3H2f8sdPXO54b5mcld/2n/D39yw
HSODGmgkbEVjXK1Qx4xYDRHJnOuyExWQ1D7Y7HocgtIRySFdG/h7en5SM2ooJ7fa
pPtCp8f1tHHuKCjKhgC/+wlvEZFHOWcu6Hyh1FtWHwD3uu9Tj3VRKMvW0u+KQ4mC
aNEuHUEKzgwXRZvBG8Y5k35bFf9EVulTsD2fOTMWrD9CEdctQIfQnn1Oy3s43x39
94DgEx78H/5fGkUDjqljXp1RBDeNJV7+tssRMISL
-----END CERTIFICATE-----

@ -0,0 +1,21 @@
-----BEGIN CERTIFICATE-----
MIIDbjCCAlagAwIBAgIUCPrH9Oej8C0/4mg1Tum4iAzkHSMwDQYJKoZIhvcNAQEL
BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l
cmF0ZWQgQ0EwHhcNMjEwNDE4MTEwOTExWhcNMjIwNDE4MTEwOTExWjAoMSYwJAYD
VQQDEx1maWxlYmVhdC1kZXYtZWxhc3RpYy1zdGFjay0wMTCCASIwDQYJKoZIhvcN
AQEBBQADggEPADCCAQoCggEBALdenPTRywIofEt8gAlpc1KwyzINsUE3TpBGYBS2
oNAJdPC5kxSvsxclaATrAWZREPHaBiwlwgN5ApsAJzjC5NQDUrGP3ZR9Ij4Rkm5m
2yX32aWm2PurEOjhX6LPquCfadfzCctNFF1CEL/LzWenzUPN1gWSfHTIk3RGJMBt
BCb8y80RDFbFD7js7k87/zSYMlFocA/XTLWs8CTG7i71rxFVAc+9V7PWziUyIZ4j
Wa+cNDwrjmhscrA6IYf367wb+PUwcQJOVC5+NKJrJUCh91hYPn8Z34RGePuIOAjw
ITl13KrIK651pl1hear4SabGpFDX7uZwhfzMj31aJmqMQx0CAwEAAaOBgzCBgDAd
BgNVHQ4EFgQUiBlwII0trXd0F2tfRcHjcjgkZAAwHwYDVR0jBBgwFoAUSeYMqKe7
afehFejvxRyGNj90+6cwMwYDVR0RBCwwKoIJbG9jYWxob3N0gh1maWxlYmVhdC1k
ZXYtZWxhc3RpYy1zdGFjay0wMTAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBCwUAA4IB
AQBogbmwhFmJVu9Sr9E9dhsj622aVvg3MQvp1tkvZ1S+ATXELwzvoKStmlvSUWdD
4KB4oAgK/b6a01WrJC1vF3RPHMF7JGmfGRqeJyYtk4uGfWWshnex+Ub0ooffd6l4
I+sGFwiGuqHuekp5w0VEdgtRrrCaWXoHahIxiSdhcqaiRlS0TI8LOkjkTa4Y26am
aNg4PrP2dupJGGC94gEdomzaDw63tJsD3kSGuG3YVHMDmdySJv+ivJoudPsY+zva
dfwAVmpWVFdfd3L3twf7Mge68Zfcf8gIqxRTwr5LWfj//cu4ZbyiEe6gqgZs9Z2r
V/aVoiOdEjhuEaQh+m3sNkfq
-----END CERTIFICATE-----

@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEAt16c9NHLAih8S3yACWlzUrDLMg2xQTdOkEZgFLag0Al08LmT
FK+zFyVoBOsBZlEQ8doGLCXCA3kCmwAnOMLk1ANSsY/dlH0iPhGSbmbbJffZpabY
+6sQ6OFfos+q4J9p1/MJy00UXUIQv8vNZ6fNQ83WBZJ8dMiTdEYkwG0EJvzLzREM
VsUPuOzuTzv/NJgyUWhwD9dMtazwJMbuLvWvEVUBz71Xs9bOJTIhniNZr5w0PCuO
aGxysDohh/frvBv49TBxAk5ULn40omslQKH3WFg+fxnfhEZ4+4g4CPAhOXXcqsgr
rnWmXWF5qvhJpsakUNfu5nCF/MyPfVomaoxDHQIDAQABAoIBAE9iMmj6efyRMl4r
o/JvKHHf/9fHfblSDD0Beo79EVl+/pVIgZgvCEU4+HNIme6FoeRSEuIB5qBCPxKD
WneESDRQy/f65F5oXe6pBM+uz6j8R8kjFkS9pjBrgU+mv79GxDetC8xrrilBdKbT
wDTjvEViUwlOhXq5arynsTls+KM3ihJfKjQh8ahal4+n/GN3R+S/F3eegKEEJq/k
vqmIPcPgHRoUb00s+zrNbiltLVlR/rU5/2vMudOrATdz0sf8DWssWBKMKv/7VYRS
GAykRuvGVtHS7UAA2zkoslSjPW6GjdxfZPRDo51VHFW1IK4PugY9OGVYR8+pwUX9
aqyPQQECgYEA3MA8xQXqrL2tZNSNr5jSUiv5CkWbbkIkS+WYPVj7JduCYXcs1s74
9AIDtmNOA5pPzHwjMtabU84BzjNOT6QDvdlkbVe9zgvq9svU9lNz2c3Q4MpbCT/i
lFhZjKe0cROSAvYk3hgUNE4DD4MymT2Q2/3sFAmbyTfPUr5bu86zonECgYEA1KZR
JsRrBuPk+7CbN8rytR/ZnT0h/SI/aev+4crDkwa0soQ4yaQK/9r91Jeq8HY/Tbwg
27c+LP33ms5/3gkpwkeh+VqeZ8fIbXJTqZ1FCadtUgLCq8vutanlaE8Z0K/enRS0
Capexp1ZDtp8eSAeyos7RPxehqHSUMeF+MHMKW0CgYBbkKGkV7/vxv2VRVU/8PPM
gdDbIeRG58iGcsWjLLWADn0WUIiY0WESVYOUs7w4YlmXSCaRf9MN//VfwohJII8s
wG+Xqz1fqjHcDNBZHGSBg42QsF7yhz1EqyD55tZB0QxPjinctcArsfAzDwh957ue
hMTXyuSDolKsz6jdTe/VAQKBgF3mLxFqTERXn4ZQPsoNMM0wCjy3gOmxFMVl8z+q
9F9Y57OoVRcc+8ps3gbhDhdub5eYyf2bVbYyUwKlyqq16x2h2fEsxaPYATXq9OyB
yLlxmAFNvL51p6vKIMXFoAWZkzhTqwhVldIoKuo3Kh2mRFJ11q8orWjPzfnjkNH+
aXOlAoGAaYv+Ft7GeR8mcZ2IIOivpVDQaYqgSN+uRA9yWd/ozvMtvN/sGkI48gco
ZlD7rsbZVhHLXTA+zbWU5G3SuBhJfwuiNWhHtuDRVqBefaXBaQr8GHxBxw1XcyxU
b+wzpN3zsSXMTW6+xrgw2I1eM07Ncn80EMfAjjUXFSLgoLVRDKk=
-----END RSA PRIVATE KEY-----

@ -0,0 +1,28 @@
# Filebeat configuration: collect Docker container logs and ship them to
# Logstash over mutual TLS.
filebeat.inputs:
  # Fallback input: tail every container's JSON log file on the host.
  - type: container
    paths:
      # BUG FIX: original read "-/var/..." (no space after the dash), which
      # YAML parses as a single plain scalar rather than a sequence entry.
      - /var/lib/docker/containers/*/*.log

filebeat.autodiscover:
  providers:
    - type: docker
      hints.enabled: true
      templates:
        # Apply multiline stitching only to containers whose image name
        # contains "smardigo".
        - condition:
            contains:
              docker.container.image: smardigo
          config:
            - type: container
              paths:
                - /var/lib/docker/containers/${data.docker.container.id}/*.log
              # A new log event starts with "{" (JSON) or an ISO date;
              # every other line is appended to the previous event
              # (joins stack traces into one event).
              multiline.pattern: '^{|^[0-9]{4}-[0-9]{2}-[0-9]{2}'
              multiline.negate: true
              multiline.match: after

output.logstash:
  # Rendered by Ansible/Jinja; quoted so an empty expansion stays a string.
  hosts: ["{{ logstash_hostname }}:5044"]
  ssl:
    certificate_authorities:
      - /usr/share/filebeat/config/certificates/ca/ca.crt
    certificate: /usr/share/filebeat/config/certificates/filebeat.crt
    key: /usr/share/filebeat/config/certificates/filebeat.key

@ -0,0 +1,39 @@
/* Keycloak admin-console theme overrides for the Smardigo brand.
   Palette: #b02d3f (brand red), #ffffff (white), #000000 (black). */

/* Swap the default Keycloak navbar logo for the Smardigo logo. */
.navbar-title {
background-color: #ffffff;
background-image: url('../img/smardigo-logo.png');
height: 30px;
width: 171px;
}
/* White navbar with a 3px brand-red bottom border. */
.navbar-pf {
border-top-color: #ffffff;
border-bottom-color: #b02d3f;
background: #ffffff;
border-bottom-width: 3px;
}
.navbar-pf .navbar-header {
border-color: #b02d3f;
}
/* Active sidebar entry: white text on brand red. */
.bs-sidebar ul li.active a {
color: #ffffff;
border-color: #b02d3f;
background-color: #b02d3f;
}
/* Primary buttons: flat brand red (gradient start == end). */
.btn-primary {
background-color: #b02d3f;
border-color: #b02d3f;
background-image: linear-gradient(to bottom,#b02d3f 0,#b02d3f 100%);
background-repeat: repeat-x;
}
.btn-primary.active, .btn-primary:active, .btn-primary:focus, .btn-primary:hover, .open .dropdown-toggle.btn-primary {
background-color: #b02d3f;
border-color: #b02d3f;
}
/* Utility links (top-right) in black; !important needed to beat the
   PatternFly default, which is more specific. */
.navbar-pf .navbar-utility > li > a {
color: #000000 !important;
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 149 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.0 KiB

@ -0,0 +1,4 @@
# Keycloak "account" theme descriptor for the Smardigo brand.
# Inherits templates from the built-in "keycloak" theme and shared
# resources from common/keycloak.
parent=keycloak
import=common/keycloak
# Stylesheets load in order; css/smardigo-account.css comes last so its
# brand overrides win over the stock PatternFly styles.
styles=node_modules/patternfly/dist/css/patternfly.min.css node_modules/patternfly/dist/css/patternfly-additions.min.css css/account.css css/smardigo-account.css

@ -0,0 +1,17 @@
/* Keycloak account-console theme overrides for the Smardigo brand. */

/* White navbar background. */
.navbar-pf {
background: #ffffff;
}
/* Replace the default brand image with the Smardigo logo. */
.navbar-pf .navbar-brand {
background-image: url('../img/smardigo-logo.png');
background-size: 171px 30px;
width: 171px;
}
/* Brand-red separator under the header. */
.navbar-pf .navbar-header {
border-bottom: 1px solid #b02d3f;
}
/* Dropdown toggle in black; !important beats the more specific
   PatternFly default. */
.navbar-pf .navbar-utility .dropdown-toggle {
color: #000000!important;
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 149 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.0 KiB

@ -0,0 +1,4 @@
# Keycloak "admin" theme descriptor for the Smardigo brand.
# Inherits templates from the built-in "keycloak" theme and shared
# resources from common/keycloak.
parent=keycloak
import=common/keycloak
# Stylesheets load in order; css/smardigo-styles.css comes last so its
# brand overrides win over the stock admin-console styles.
styles=node_modules/patternfly/dist/css/patternfly.min.css node_modules/patternfly/dist/css/patternfly-additions.min.css node_modules/select2/select2.css lib/angular/treeview/css/angular.treeview.css node_modules/text-security/text-security.css css/styles.css css/smardigo-styles.css

@ -0,0 +1,59 @@
/* Keycloak login-theme overrides for the Smardigo brand. */

/* Center the login box */
.login-pf-page {
position: relative;
top: 40%; /* leave the top 40% free (50% would be exact centering, but 40% looks better) */
transform: translateY(-50%); /* then shift the box up by half its own height */
}
.login-pf body {
background: none; /* remove the Keycloak theme background image... */
background-color: #f3f5f7; /* ...behind it there is still a default background image, which only disappears once a color is set here (odd...) */
}
.login-pf-page .login-pf-header h1 {
font-size: 24px;
font-weight: bold;
text-align: left;
}
/* Elevated card look for the login box. */
.login-pf-page .card-pf {
box-shadow: 0 4px 8px 0 rgba(0, 0, 0, 0.2), 0 6px 20px 0 rgba(0, 0, 0, 0.19);
}
/* Show the Smardigo logo in the header area. */
#kc-header {
background-image: url("../img/smardigo-logo.png");
background-size: contain;
background-repeat: no-repeat;
background-position: center;
}
/* Hide the Keycloak client name; the Smardigo logo is shown instead */
#kc-header-wrapper {
visibility: hidden;
}
/* Primary buttons: flat green (gradient start == end). */
.btn-primary {
background-image: linear-gradient(to bottom,#006a68 0,#006a68 100%);
border-color: #006a68;
}
/* BUG FIX: the last selector was a bare "btn-lg" (element selector for a
   nonexistent <btn-lg> tag); the Bootstrap class ".btn-lg" was intended. */
.btn-primary.active, .btn-primary:active, .btn-primary:focus, .btn-primary:hover, .open .dropdown-toggle.btn-primary, .btn-lg {
background-color: #b02d3f;
border-color: #b02d3f;
}
/* Brand-red focus/hover ring on form fields. */
.form-control:hover {
border-color: #b02d3f;
}
.form-control:focus {
border-color: #b02d3f;
}
.card-pf {
border-top-color: #b02d3f;
}
.pf-m-primary {
background-color: #b02d3f!important;
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 149 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 35 KiB

@ -0,0 +1,4 @@
parent=keycloak
import=common/keycloak
styles=node_modules/patternfly/dist/css/patternfly.min.css node_modules/patternfly/dist/css/patternfly-additions.min.css lib/zocial/zocial.css css/login.css css/tile.css css/smardigo-login.css

@ -40,7 +40,7 @@ scrape_configs:
relabel_configs: relabel_configs:
- source_labels: [__address__] - source_labels: [__address__]
target_label: instance target_label: instance
replacement: '{{ service_prefix }}prometheus.{{ domain }}' replacement: 'prometheus.{{ domain }}'
############################################## ##############################################
@ -71,7 +71,7 @@ scrape_configs:
static_configs: static_configs:
- targets: [ - targets: [
{% for host in groups['connect'] | default([]) %} {% for host in groups['connect'] | default([]) %}
'{{ host }}-connect.{{ domain }}:{{ monitor_port_service }}', '{{ host }}.{{ domain }}:{{ monitor_port_service }}',
{% endfor %} {% endfor %}
] ]
labels: labels:

Loading…
Cancel
Save