DEV-173: review/regression/cleanup: connect + wordpress

master
Sven Ketelsen 4 years ago
parent 7f4b338477
commit 38b103e363

@ -19,15 +19,15 @@
ansible-galaxy collection install hetzner.hcloud
ansible-galaxy collection install community.general
ansible-galaxy install geerlingguy.kubernetes
ansible-galaxy collection install community.kubernetes
ansible-galaxy collection install community.mysql
## With poetry
poetry run ansible-galaxy collection install hetzner.hcloud
poetry run ansible-galaxy collection install community.general
poetry run ansible-galaxy install geerlingguy.kubernetes
poetry run ansible-galaxy collection install community.kubernetes
poetry run ansible-galaxy collection install community.mysql
# Setup
Create/Start servers for stage-dev

@ -2,4 +2,3 @@ python38-devel [platform:rpm compile]
subversion [platform:rpm]
subversion [platform:dpkg]
git-lfs [platform:rpm]

@ -3,3 +3,4 @@ collections:
- ansible.posix
- hetzner.hcloud
- community.general
- community.mysql

@ -3,3 +3,4 @@ collections:
- ansible.posix
- hetzner.hcloud
- community.general
- community.mysql

@ -1,24 +0,0 @@
- hosts: localhost
connection: local
gather_facts: false
vars:
repository: "git@git.dev-at.de:smardigo/maven/smardigo-maven-versions.git"
branch_type: "spike"
branch_ticket: "TST-0001"
pre_tasks:
- name: "Checkout git repository"
ansible.builtin.git:
repo: "{{ repository }}"
dest: "./checkout"
version: "master"
- name: "Insert/Update branch configuration"
template:
src: 'create-branch-maven.j2'
dest: './checkout/.ci_create-branch-maven'
- name: "Branching project"
shell: ". .ci_create-branch-maven"
args:
chdir: "./checkout"

@ -1,8 +1,15 @@
---
# creates postgres databases on shared service postgres server
# - executed on stage specific postgres server: {{ stage }}-postgres-01
# - creates databases to work with connect: {{ connect_postgres_database_name }}
# creates databases on shared service servers
# - postgres
# - executed on stage specific server: {{ stage }}-postgres-01
# - creates databases to work with connect: {{ connect_postgres_database }}
# - creates databases to work with management connect: {{ management_connect_postgres_database }}
# - creates databases to work with shared webdav: {{ webdav_postgres_database }}
# - creates databases to work with shared keycloak: {{ keycloak_postgres_database }}
# - maria
# - executed on stage specific server: {{ stage }}-maria-01
# - creates databases to work with connect wordpress: {{ connect_wordpress_maria_database }}
# Parameters:
# playbook inventory
@ -10,7 +17,7 @@
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster) (Currently max is 2 master/slave)
# cluster_service := (service to setup, e.g. connect)
# cluster_services := (services to setup, e.g. ['connect', 'wordpress', ...])
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
@ -33,25 +40,27 @@
msg: "The ansible version has to be at least ({{ ansible_version.full }})"
tasks:
- name: Add postgres-servers to hosts
- name: Add postgres servers to hosts if necessary
add_host:
name: "{{ stage }}-postgres-01"
groups:
- "stage_{{ stage }}"
- "{{ cluster_service }}"
- "{{ item }}"
cluster_service: "{{ item }}"
changed_when: False
when:
- cluster_service in ['connect', 'webdav']
with_items: "{{ cluster_services }}"
when: item in ['connect', 'management_connect', 'keycloak', 'webdav']
- name: Add maria-servers to hosts
- name: Add maria servers to hosts if necessary
add_host:
name: "{{ stage }}-maria-01"
groups:
- "stage_{{ stage }}"
- "{{ cluster_service }}"
- "{{ item }}"
cluster_service: "{{ item }}"
changed_when: False
when:
- cluster_service in ['connect_wordpress']
with_items: "{{ cluster_services }}"
when: item in ['connect_wordpress']
#############################################################
# Setup databases for created inventory
@ -92,12 +101,18 @@
- debug
roles:
- role: webdav-postgres
when: "'webdav' in group_names"
- role: connect-postgres
when: "'connect' in group_names"
- role: management-connect-postgres
when: "'management_connect' in group_names"
- role: keycloak-postgres
when: "'keycloak' in group_names"
- role: webdav-postgres
when: "'webdav' in group_names"
- role: connect-wordpress-maria
when: "'connect_wordpress' in group_names"

@ -7,6 +7,19 @@
# If `uploaded_file` and `target_database` are defined the import role imports from file basename `uploaded_file` to `target_database`
# If both role conditions match the upload role trigger first.
# Parameters:
# playbook inventory
# stage := the type of the stage (e.g. dev, int, qa, prod)
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster) (Currently max is 2 master/slave)
# cluster_services := (services to setup, e.g. ['connect', 'wordpress', ...])
# upload_file := the local file to upload (e.g. dumps/wordpress_portal.sql)
# uploaded_file := the dump file to import (e.g. wordpress_portal.sql)
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
# smardigo_management_action := (smardigo management action anme of the management process)
#############################################################
# Creating inventory dynamically for given parameters
@ -27,14 +40,14 @@
tasks:
- name: Add hosts
add_host:
name: "{{ stage }}-{{ cluster_name }}-{{ '%02d' | format(item|int) }}"
name: "{{ stage }}-maria-01"
groups:
- "stage_{{ stage }}"
- "upload_local_file"
- "import_maria_database"
with_sequence: start=1 end={{ cluster_size | default(1) }}
- "{{ item }}"
cluster_service: "{{ item }}"
changed_when: False
with_items: "{{ cluster_service }}"
when: item in ['connect_wordpress']
#############################################################
# Setup services for created inventory
@ -72,6 +85,46 @@
roles:
- role: upload-local-file
when: "'upload_local_file' in group_names and upload_file is defined"
when:
- "'connect_wordpress' in group_names"
- "upload_file is defined"
- role: import-maria-database
when:
- "'connect_wordpress' in group_names"
- "target_database is defined"
- "uploaded_file is defined"
- role: import-maria-database
when: "'import_maria_database' in group_names and target_database is defined and uploaded_file is defined"
vars:
target_database: "{{ connect_wordpress_maria_database }}"
when:
- "'connect_wordpress' in group_names"
- "uploaded_file is defined"
#############################################################
# Sending smardigo management message to process
#############################################################
- hosts: "stage_{{ stage }}"
serial: "{{ serial_number | default(5) }}"
connection: local
gather_facts: false
post_tasks:
- name: "Sending smardigo management message <{{ smardigo_management_action }}> to <{{ scope_id }}/{{ process_instance_id }}>"
uri:
url: "{{ smardigo_management_url }}"
method: POST
body_format: json
body: "{{ lookup('template','smardigo-management-message.json.j2') }}"
headers:
accept: "*/*"
Content-Type: "application/json"
Smardigo-User-Token: "{{ smardigo_management_token }}"
status_code: [200]
delegate_to: 127.0.0.1
retries: 5
delay: 5
when:
- scope_id is defined
- process_instance_id is defined
- smardigo_management_action is defined

@ -6,7 +6,7 @@
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster)
# cluster_service := (service to setup, e.g. connect)
# cluster_services := (services to setup, e.g. ['connect', 'wordpress', ...])
# playbook roles (keycloak / oidc)
# current_realm_name :=
# current_realm_display_name :=

@ -6,7 +6,7 @@
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster)
# cluster_service := (service to setup, e.g. connect)
# cluster_services := (services to setup, e.g. ['connect', 'wordpress', ...])
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)

@ -1,5 +1,17 @@
---
# Parameters:
# playbook inventory
# stage := the type of the stage (e.g. dev, int, qa, prod)
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster)
# cluster_services := (services to setup, e.g. ['connect', 'wordpress', ...])
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
# smardigo_management_action := (smardigo management action anme of the management process)
#############################################################
# Creating inventory dynamically for given parameters
#############################################################
@ -19,12 +31,29 @@
tasks:
- name: Add hosts
add_host:
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ '%02d' | format(item|int) }}"
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-01"
# name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ '%02d' | format(item|int) }}"
groups:
- "stage_{{ stage }}"
- "{{ cluster_service }}"
with_sequence: start=1 end={{ cluster_count | default(1) }}
- "{{ item }}"
changed_when: False
# with_sequence: start=1 end={{ cluster_count | default(1) }}
with_items: "{{ cluster_services }}"
when: item in ['connect']
- name: Add hosts
add_host:
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-01"
# name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ '%02d' | format(item|int) }}"
groups:
- "stage_{{ stage }}"
- "{{ cluster_service }}"
- "{{ item }}"
changed_when: False
# with_sequence: start=1 end={{ cluster_count | default(1) }}
with_items: "{{ cluster_services }}"
when: item in ['connect_wordpress']
#############################################################
# Setup services for created inventory
@ -63,11 +92,12 @@
roles:
- role: connect
when: "'connect' in group_names"
- role: connect-wordpress
when: "'connect_wordpress' in group_names"
#############################################################
# run provisioning against newly created inventory
# Sending smardigo management message to process
#############################################################
- hosts: "stage_{{ stage }}"

@ -76,10 +76,6 @@ tasks:
to: smardigo/smardigo-workflow-proxy-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- from: smardigo/caddy
to: smardigo/caddy
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- name: sensw
interval: 600

@ -4,18 +4,25 @@ debug: false
local_ssh_config: false
send_status_messages: false
domain: smardigo.digital
use_ssl: true
http_s: "http{{ use_ssl | ternary('s', '', omit) }}"
service_name: "{{ inventory_hostname }}"
stage_server_name: "{{ inventory_hostname }}"
stage_server_hostname: "{{ inventory_hostname }}"
stage_server_url_host: "{{ stage_server_name }}.{{ domain }}"
stage_server_url: "{{ http_s }}://{{ stage_server_name }}.{{ domain }}"
domain: "smardigo.digital"
stage_server_domain: "{{ inventory_hostname }}.{{ domain }}"
stage_server_url: "{{ http_s }}://{{ stage_server_domain }}"
ansible_ssh_host: "{{ stage_server_domain }}"
ansible_ssh_host: "{{ inventory_hostname }}.{{ domain }}"
hetzner_server_type: cx11
hetzner_server_image: ubuntu-20.04
# Used for root-access
hetzner_ssh_keys:
- ansible@smardigo.digital
- sven.ketelsen@netgo.de
- peter.heise@netgo.de
- claus.paetow@netgo.de
- alexander.gordon@netgo.de
hetzner_server_labels: "stage={{ stage }}"
@ -52,8 +59,8 @@ default_plattform_users:
- '{{ admin_user }}'
smardigo_plattform_users:
- 'peter.heise'
- 'sven.ketelsen'
- 'peter.heise'
- 'claus.paetow'
- 'alexander.gordon'
@ -64,16 +71,6 @@ service_base_path: '/etc/smardigo'
# TODO we need a company email address
lets_encrypt_email: "sven.ketelsen@netgo.de"
# TODO place caddy configfile system relativ instead of docker folder relative
caddy_base_path: '{{ service_base_path }}/caddy'
caddy_config_file_path: 'config/caddy/Caddyfile'
caddy_config_file_path_full: '{{ caddy_base_path }}/{{ caddy_config_file_path }}'
caddy_landing_page_file_path: 'config/static_files/index.html'
caddy_landing_page_file_path_full: '{{ caddy_base_path }}/{{ caddy_landing_page_file_path }}'
caddy_landing_page_service_table_file_path: 'config/static_files/service_table.json'
caddy_landing_page_service_table_file_path_full: '{{ caddy_base_path }}/{{ caddy_landing_page_service_table_file_path }}'
caddy_landing_page_service_table_folder_path_full: '{{ caddy_base_path }}/config/static_files/'
http_port: "80"
https_port: "443"
@ -104,17 +101,6 @@ monitor_port_harbor: "9085"
admin_port_traefik: "9080"
hetzner_server_type: cx11
hetzner_server_image: ubuntu-20.04
# Used for root-access
hetzner_ssh_keys:
- ansible@smardigo.digital
- peter.heise@netgo.de
- sven.ketelsen@netgo.de
- claus.paetow@netgo.de
- alexander.gordon@netgo.de
#reverse_proxy_admin_username: "< see vault >"
#reverse_proxy_admin_password: "< see vault >"
@ -145,5 +131,5 @@ hetzner_ssh_keys:
#vault_ansible_password: "< see vault >"
#vault_replicator_user_password: "< see vault >"
mysql_root_username: "root"
mysql_root_password: "maria-admin"
#mysql_root_username: "< see vault >"
#mysql_root_password: "< see vault >"

@ -1,63 +1,31 @@
---
service: "connect"
connect_process_search_module: "external"
connect_loglevel_message_queue: "DEBUG"
connect_loglevel_document_index: "DEBUG"
connect_loglevel_workflow_index: "DEBUG"
connect_loglevel_workflow_analysis: "DEBUG"
connect_image_version: "8.3.0-SMARCH-70-1-SNAPSHOT"
hetzner_server_type: cx21
hetzner_server_labels: "stage={{ stage }} service={{ service }}"
connect_client_id: "{{ cluster_name }}"
hetzner_server_labels: "stage={{ stage }} service=connect"
connect_client_admin_username: "connect-admin"
connect_client_admin_password: "connect-admin"
current_realm_users: [
{
"username": "{{ connect_client_admin_username }}",
"password": "{{ connect_client_admin_password }}",
}
]
connect_image_version: "8.3.0-SNAPSHOT"
connect_realm_admin_username: "connect-realm-admin"
connect_realm_admin_password: "connect-realm-admin"
# unique id for a service, will be used for service access management as well (e.g. keycloak realm)
connect_client_id: "{{ cluster_name }}"
current_realm_clients: [
{
clientId: "{{ connect_client_id }}",
name: '{{ connect_client_id }}',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://{{ service_name }}.{{ domain }}/*",
]',
secret: '{{ cluster_name }}',
web_origins: '
[
"https://{{ service_name }}.{{ domain }}/*",
]',
}
]
connect_id: "{{ inventory_hostname }}-connect"
connect_base_url: "{{ connect_id }}.{{ domain }}"
connect_postgres_host: "{{ shared_service_pg_master_hostname }}"
connect_postgres_database_name: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}_{{ cluster_service }}"
connect_postgres_admin_username: "root"
connect_postgres_admin_password: "connect-postgres-admin"
connect_postgres_database: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}_connect"
connect_postgres_username: "{{ connect_postgres_database }}"
connect_postgres_password: "connect-postgres-admin"
#connect_process_search_module: "external"
connect_elastic_host: "dev-elastic-stack-01-elastic"
connect_elastic_username: "{{ elastic_admin_username }}"
connect_elastic_password: "{{ elastic_admin_password }}"
connect_elastic_ca: "file:/usr/share/smardigo/ca.crt"
connect_elastic_prefix: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}"
connect_iam_module: external
smardigo_iam_client_enabled: 'true'
smardigo_iam_client_server_url: https://dev-iam-01.smardigo.digital
connect_iam_module: "external"
smardigo_iam_client_enabled: "true"
smardigo_iam_client_server_url: "{{ http_s }}://{{ shared_service_iam_hostname }}/"
connect_auth_module: "oidc"
connect_oidc_client_id: "{{ connect_client_id }}"
@ -65,8 +33,8 @@ connect_oidc_client_secret: "{{ cluster_name }}"
connect_oidc_registration_id: "{{ connect_client_id }}"
connect_oidc_issuer_uri: "https://{{ shared_service_keycloak_hostname }}/auth/realms/{{ current_realm_name }}"
connect_password_change_url: "https://{{ shared_service_keycloak_hostname }}/auth/realms/{{ current_realm_name }}/account/password"
connect_iam_user_management_url: "https://{{ shared_service_keycloak_hostname }}/auth/admin/{{ current_realm_name }}/console"
connect_password_change_url: "{{ http_s }}://{{ shared_service_keycloak_hostname }}/auth/realms/{{ current_realm_name }}/account/password"
connect_iam_user_management_url: "{{ http_s }}://{{ shared_service_keycloak_hostname }}/auth/admin/{{ current_realm_name }}/console"
connect_jwt_enabled: true
connect_jwt_secret: 908ae14462d049d3be84964ef379c7c6
@ -78,3 +46,8 @@ webdav_jwt_secret: "5646aee6dadc4c19b15f4b65f1e6549f"
#connect_csrf_token_value: "< see vault >"
connect_mail_properties_simulation: false
connect_loglevel_message_queue: "DEBUG"
connect_loglevel_document_index: "DEBUG"
connect_loglevel_workflow_index: "DEBUG"
connect_loglevel_workflow_analysis: "DEBUG"

@ -1,50 +1,9 @@
---
connect_wordpress_maria_database_name: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}_{{ cluster_service }}"
connect_wordpress_maria_username: "{{ connect_wordpress_maria_database_name }}"
connect_wordpress_maria_password: "connect-wordpress-maria-admin"
connect_wordpress_maria_host: "{{ shared_service_maria_hostname }}"
connect_wordpress_maria_database_name: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}_{{ cluster_service }}"
connect_wordpress_maria_username: "{{ connect_wordpress_maria_database_name }}"
connect_wordpress_maria_database: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}_connect_wordpress"
connect_wordpress_maria_username: "{{ connect_wordpress_maria_database }}"
connect_wordpress_maria_password: "connect-wordpress-maria-admin"
wordpress_domain_external: "https://{{ stage_server_url_host }}"
wordpress_client_id: "{{ cluster_name }}"
wordpress_buergerportal_username: "wordpress-admin"
wordpress_buergerportal_password: "wordpress-admin"
current_realm_users: [
{
"username": "{{ wordpress_buergerportal_username }}",
"password": "{{ wordpress_buergerportal_password }}",
}
]
current_realm_clients: [
{
clientId: "{{ wordpress_client_id }}",
name: '{{ wordpress_client_id }}',
admin_url: '',
root_url: '',
redirect_uris: '
[
"https://{{ service_name }}.{{ domain }}/*",
]',
secret: '{{ cluster_name }}',
web_origins: '
[
"https://{{ service_name }}.{{ domain }}/*",
]',
}
]
wordpress_oidc_client_id: "{{ wordpress_client_id }}"
wordpress_oidc_client_secret: "{{ cluster_name }}"
sk_nrw_issuer: "idc"
sk_nrw_provider_url: "idc"
sk_nrw_client_id: "idc"
sk_nrw_client_secret: "idc"
smardigo_auth_token_name: "idc"
smardigo_auth_token_value: "idc"

@ -1,9 +1,16 @@
---
hetzner_server_type: cx11
hetzner_server_labels: "stage={{ stage }} service=keycloak"
keycloak_postgres_host: "{{ shared_service_pg_master_hostname }}"
keycloak_postgres_database: "{{ stage }}_keycloak"
keycloak_postgres_username: "{{ keycloak_postgres_database }}"
keycloak_postgres_password: "keycloak-postgres-admin"
keycloak_server_url: "http://localhost:{{ service_port_keycloak_external }}"
# TODO shouldn't be declared in a static way -> must be stage specific
keycloak: {
realms: [
{
@ -13,19 +20,25 @@ keycloak: {
{
"username": "docker-admin",
"password": "docker-admin",
"email": "sven.ketelsen@netgo.de"
"email": "docker-admin@smardigo.digital",
}
],
groups: [
{
"name": "admin",
},
{
"name": "awx",
},
{
"name": "sensw",
},
{
"name": "smardigo",
},
{
"name": "ssp",
},
],
clients: [
{
@ -35,7 +48,7 @@ keycloak: {
root_url: '',
redirect_uris: '
[
"https://dev-docker-registry-01.smardigo.digital/*"
"https://dev-docker-registry-01.smardigo.digital/*",
]',
secret: 'f1f852b4-2e75-448a-9596-3c77d53ce405',
web_origins: '
@ -62,12 +75,12 @@ keycloak: {
root_url: '',
redirect_uris: '
[
"https://dev-management-smardigo-01.smardigo.digital/*"
"https://dev-management-smardigo-01-connect.smardigo.digital/*",
]',
secret: 'f1f852b4-2e75-889a-2453-3c55d53ce405',
web_origins: '
[
"https://dev-management-smardigo-01.smardigo.digital",
"https://dev-management-smardigo-01-connect.smardigo.digital",
]',
}
]

@ -0,0 +1,11 @@
---
hetzner_server_type: cpx11
hetzner_server_labels: "stage={{ stage }} service=connect"
management_connect_jwt_secret: "f1a291c1946f47e1815b73d01a7da814"
management_connect_postgres_host: "{{ shared_service_pg_master_hostname }}"
management_connect_postgres_database: "dev_management_smardigo_connect"
management_connect_postgres_username: "{{ connect_postgres_database }}"
management_connect_postgres_password: "connect-postgres-admin"

@ -29,11 +29,12 @@ shared_service_maria_hostname: "dev-maria-01.smardigo.digital"
shared_service_pg_master_hostname: "dev-postgres-01.smardigo.digital"
shared_service_pg_slave_hostname: "dev-postgres-02.smardigo.digital"
shared_service_webdav_hostname: "dev-webdav-01.smardigo.digital"
management_service_connect_hostname: "dev-management-smardigo-01-connect.smardigo.digital"
shared_service_hosts: [
{
ip: "127.0.1.1",
name: "{{ stage_server_hostname }}"
name: "{{ inventory_hostname }}"
},
# TODO bind address ist set to public ip instead of 0.0.0.0
# {
@ -209,8 +210,8 @@ pgadmin_extra_hosts: [
}
]
smardigo_management_url: "https://dev-management-smardigo-01.smardigo.digital/api/v1/scopes/{{ scope_id }}/processes/{{ process_instance_id }}/messages"
smardigo_management_token: "eyJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiZGlyIn0..JgixZcmsSyvovabQvREAjw.Fk7aNYwOjzMhLCqF_9unl5yrWTey26z4scZBeVZjhpE.fnovrqn0MUjM_TA8zVhXdQ"
smardigo_management_url: "{{ http_s }}://{{ management_service_connect_hostname }}/api/v1/scopes/{{ scope_id }}/processes/{{ process_instance_id }}/messages"
smardigo_management_token: "eyJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiZGlyIn0..rCRO1cVFgkyZ45D5cJNK5g.fc6JVOo5ja5sqe-0PQTfJGOivJ6tyiD-rwgY6rXJ3-U.tOgqgJ2zTjB3_M9BGtvVjQ"
awx_admin_username: "awx-admin"
awx_admin_password: "awx-admin"

@ -6,6 +6,6 @@ hetzner_server_labels: "stage={{ stage }} service=webdav"
webdav_jwt_secret: "5646aee6dadc4c19b15f4b65f1e6549f"
webdav_postgres_host: "{{ shared_service_pg_master_hostname }}"
webdav_postgres_database_name: "{{ stage }}_webdav"
webdav_postgres_admin_username: "{{ webdav_postgres_database_name }}"
webdav_postgres_admin_password: "webdav-postgres-admin"
webdav_postgres_database: "{{ stage }}_webdav"
webdav_postgres_username: "{{ webdav_postgres_database }}"
webdav_postgres_password: "webdav-postgres-admin"

@ -1,10 +1,10 @@
---
connect_auth_module: oidc
connect_oidc_client_id: management-smardigo
connect_oidc_client_secret: f1f852b4-2e75-889a-2453-3c55d53ce405
connect_oidc_registration_id: management-smardigo
connect_oidc_issuer_uri: https://{{ shared_service_keycloak_hostname }}/auth/realms/smardigo
hetzner_server_type: cpx21
connect_password_change_url: https://{{ shared_service_keycloak_hostname }}/auth/realms/smardigo/account/password
connect_iam_user_management_url: https://{{ shared_service_keycloak_hostname }}/auth/admin/smardigo/console
connect_elastic_prefix: "dev_management_smardigo_connect"
connect_postgres_database: "dev_management_smardigo_connect"
current_realm_name: "smardigo"
connect_client_id: "management-smardigo"
connect_oidc_client_secret: "f1f852b4-2e75-889a-2453-3c55d53ce405"

@ -1,55 +0,0 @@
---
- name: "Check if landing page service table exists"
stat:
path: "{{ caddy_landing_page_service_table_file_path_full }}"
register: check_caddy_landing_page_service_table_file
tags:
- update_deployment
- name: "Read landing page service table data"
slurp:
src: "{{ caddy_landing_page_service_table_file_path_full }}"
register: landing_page_service_table_plain
when: check_caddy_landing_page_service_table_file.stat.exists
tags:
- update_deployment
- name: "Set landing page service table as variable"
set_fact:
landing_page_service_table: "{{ landing_page_service_table_plain['content'] | b64decode }}"
when: check_caddy_landing_page_service_table_file.stat.exists
tags:
- update_deployment
- name: "Read landing page service table data"
set_fact:
landing_page_service_table: []
when: not check_caddy_landing_page_service_table_file.stat.exists
tags:
- update_deployment
- name: "Update landing page service table variable"
set_fact:
landing_page_service_table: "{{ ([item] + landing_page_service_table) | unique(attribute='current_name') }}"
with_items: "{{ current_services }}"
tags:
- update_deployment
- name: 'Ensures {{ caddy_landing_page_service_table_folder_path_full }} directory exists'
file:
state: directory
path: '{{ caddy_landing_page_service_table_folder_path_full }}'
tags:
- update_deployment
- update_config
- name: "Write landing page service table"
copy:
content: "{{ landing_page_service_table | to_nice_json }}"
dest: "{{ caddy_landing_page_service_table_file_path_full }}"
owner: "{{ docker_owner }}"
group: "{{ docker_group }}"
mode: 0644
tags:
- update_deployment

@ -540,27 +540,27 @@
include_tasks: awx-config-job-template.yml
loop:
- {
name: "create-server",
#description: "create-server",
#playbook_file: "create-server.yml",
name: "create-database-cluster",
#description: "create-database-cluster",
#playbook_file: "create-database-cluster.yml",
credentials: [
"{{ awx_credential_hetzner_ansible_vault_id }}",
"{{ awx_credential_hetzner_ansible_id }}",
]
}
- {
name: "create-database-container",
#description: "create-database-container",
#playbook_file: "create-database-container.yml",
name: "create-db-import",
#description: "create-db-import",
#playbook_file: "create-db-import.yml",
credentials: [
"{{ awx_credential_hetzner_ansible_vault_id }}",
"{{ awx_credential_hetzner_ansible_id }}",
]
}
- {
name: "create-database-cluster",
#description: "create-database-cluster",
#playbook_file: "create-database-cluster.yml",
name: "create-server",
#description: "create-server",
#playbook_file: "create-server.yml",
credentials: [
"{{ awx_credential_hetzner_ansible_vault_id }}",
"{{ awx_credential_hetzner_ansible_id }}",

@ -4,7 +4,7 @@
### kube_install
### awx_config
- name: "Install pip3 for {{ service_name }}"
- name: "Install pip3 for {{ inventory_hostname }}"
apt:
name: python3-pip
state: present
@ -12,7 +12,7 @@
tags:
- kube_install
- name: "Install kubernetes over pip3 for {{ service_name }}"
- name: "Install kubernetes over pip3 for {{ inventory_hostname }}"
pip:
name: kubernetes
state: present
@ -20,7 +20,7 @@
tags:
- kube_install
- name: "Install and setup kubernetes (single node, master-only cluster) for {{ service_name }}"
- name: "Install and setup kubernetes (single node, master-only cluster) for {{ inventory_hostname }}"
include_role:
name: ansible-role-kubernetes
vars:
@ -30,7 +30,7 @@
tags:
- kube_install
- name: "Download awx {{ awx_operator_version }} to kubernetes template for {{ service_name }}"
- name: "Download awx {{ awx_operator_version }} to kubernetes template for {{ inventory_hostname }}"
get_url:
url: "{{ awx_operator_url }}"
dest: /tmp/awx-operator.yaml
@ -38,7 +38,7 @@
tags:
- kube_install
- name: "Apply awx {{ awx_operator_version }} to kubernetes {{ service_name }}"
- name: "Apply awx {{ awx_operator_version }} to kubernetes {{ inventory_hostname }}"
kubernetes.core.k8s:
state: present
src: /tmp/awx-operator.yaml
@ -48,7 +48,7 @@
tags:
- kube_install
- name: "Ensure that postgres volume directory exists for {{ service_name }}"
- name: "Ensure that postgres volume directory exists for {{ inventory_hostname }}"
file:
path: "{{ item }}"
state: directory
@ -60,7 +60,7 @@
tags:
- kube_install
- name: "Copy deployment template for {{ service_name }}"
- name: "Copy deployment template for {{ inventory_hostname }}"
template:
src: awx-deployment.yml.j2
dest: /tmp/awx-deployment.yml
@ -70,7 +70,7 @@
tags:
- kube_install
- name: "Create a awx k8s namespace for {{ service_name }}"
- name: "Create a awx k8s namespace for {{ inventory_hostname }}"
kubernetes.core.k8s:
name: "{{ kubernetes_awx_namespace }}"
api_version: v1
@ -79,7 +79,7 @@
tags:
- kube_install
- name: "Apply awx deployment for {{ service_name }}"
- name: "Apply awx deployment for {{ inventory_hostname }}"
kubernetes.core.k8s:
state: present
src: /tmp/awx-deployment.yml
@ -89,7 +89,7 @@
tags:
- kube_install
- name: "Cleanup for {{ service_name }}"
- name: "Cleanup for {{ inventory_hostname }}"
file:
path: "{{ item }}"
state: absent
@ -99,7 +99,7 @@
tags:
- kube_install
- name: "Wait for awx service {{ service_name }}"
- name: "Wait for awx service {{ inventory_hostname }}"
uri:
url: "http://{{ stage_server_ip }}:{{ kubernetes_awx_service_port }}"
status_code: 200
@ -111,7 +111,7 @@
tags:
- kube_install
- name: "Search for all pods labeled app.kubernetes.io/name=awx {{ service_name }}"
- name: "Search for all pods labeled app.kubernetes.io/name=awx {{ inventory_hostname }}"
kubernetes.core.k8s_info:
kind: Pod
namespace: "{{ kubernetes_awx_namespace }}"
@ -121,7 +121,7 @@
tags:
- kube_install
- name: "Wait for awx-task db-migration {{ service_name }}"
- name: "Wait for awx-task db-migration {{ inventory_hostname }}"
kubernetes.core.k8s_exec:
namespace: "{{ kubernetes_awx_namespace }}"
pod: "{{ pod_list.resources[0].metadata.name }}"

@ -36,13 +36,13 @@
tags:
- local_ssh_config
- name: "Set hostname to <{{ stage_server_hostname }}>"
- name: "Set hostname to <{{ inventory_hostname }}>"
hostname:
name: "{{ stage_server_hostname }}"
name: "{{ inventory_hostname }}"
- name: "Setting hosts configuration in /etc/hosts"
blockinfile:
marker: "# {mark} managed by ansible (hosts config for {{ stage_server_hostname }})"
marker: "# {mark} managed by ansible (hosts config for {{ inventory_hostname }})"
path: "/etc/hosts"
state: present
create: yes

@ -1,6 +1,6 @@
---
postgres_acls:
- name: "{{ connect_postgres_database_name }}"
password: "{{ connect_postgres_admin_password }}"
- name: "{{ connect_postgres_database }}"
password: "{{ connect_postgres_password }}"
trusted_cidr_entry: "{{ shared_service_network }}"

@ -16,7 +16,7 @@
when:
- send_status_messages
- name: "Setup postgres for {{ service_name }}"
- name: "Setup postgres for {{ inventory_hostname }}"
include_role:
name: postgres
tasks_from: _postgres-acls

@ -1 +1,32 @@
---
# configuration for the connect realm
connect_realm_admin_username: "connect-realm-admin"
connect_realm_admin_password: "connect-realm-admin"
connect_client_admin_username: "connect-admin"
connect_client_admin_password: "connect-admin"
current_realm_clients: [
{
name: '{{ connect_client_id }}',
clientId: "{{ connect_client_id }}",
admin_url: '',
root_url: '',
redirect_uris: '
[
"{{ http_s }}://{{ connect_base_url }}/*",
]',
secret: '{{ cluster_name }}',
web_origins: '
[
"{{ http_s }}://{{ connect_base_url }}/*",
]',
}
]
current_realm_users: [
{
"username": "{{ connect_client_admin_username }}",
"password": "{{ connect_client_admin_password }}",
}
]

@ -16,12 +16,12 @@
when:
- send_status_messages
- name: "Setup realm for {{ service_name }}"
- name: "Setup realm for {{ inventory_hostname }}"
include_role:
name: keycloak
tasks_from: _authenticate
- name: "Setup realm for {{ service_name }}"
- name: "Setup realm for {{ inventory_hostname }}"
include_role:
name: keycloak
tasks_from: _configure_realm

@ -2,7 +2,7 @@
mysql_databases: [
{
name: "{{ connect_wordpress_maria_database_name }}",
name: "{{ connect_wordpress_maria_database }}",
collation: "utf8_general_ci",
encoding: "utf8",
}
@ -13,7 +13,7 @@ mysql_users: [
name: "{{ connect_wordpress_maria_username }}",
host: "%",
password: "{{ connect_wordpress_maria_password }}",
priv: "{{ connect_wordpress_maria_database_name }}.*:ALL",
priv: "{{ connect_wordpress_maria_database }}.*:ALL",
}
]

@ -13,7 +13,7 @@
when:
- send_status_messages
- name: "Setup maria for {{ service_name }}"
- name: "Setup maria for {{ inventory_hostname }}"
include_role:
name: maria
tasks_from: _create-database

@ -22,7 +22,7 @@
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}-wordpress"
record_name: "{{ inventory_hostname }}-wordpress"
- name: "Check if {{ wordpress_id }}/docker-compose.yml exists"
stat:
@ -77,23 +77,6 @@
tags:
- update_deployment
- name: "Update landing page entries for {{ wordpress_id }}"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "{{ wordpress_id }}",
current_url: "{{ http_s }}://{{ wordpress_id }}.{{ domain }}",
current_version: "{{ wordpress_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
management: "{{ http_s }}://{{ wordpress_id }}.{{ domain }}:{{ monitor_port_service }}/management",
},
]
tags:
- update_deployment
- name: "Send mattermost messsge"
uri:
url: "{{ mattermost_hook_smardigo }}"

@ -1,7 +1,7 @@
---
wordpress_id: "{{ service_name }}-wordpress"
wordpress_base_url: "{{ stage_server_hostname }}-wordpress.{{ domain }}"
wordpress_id: "{{ inventory_hostname }}-wordpress"
wordpress_base_url: "{{ inventory_hostname }}-wordpress.{{ domain }}"
wordpress_labels: [
'"traefik.enable=true"',
@ -39,24 +39,24 @@ wordpress_docker: {
"WORDPRESS_DB_HOST: \"{{ connect_wordpress_maria_host }}:{{ wordpress_mysql_port | default('3306') }}\"",
"WORDPRESS_DB_USER: \"{{ connect_wordpress_maria_username }}\"",
"WORDPRESS_DB_PASSWORD: \"{{ connect_wordpress_maria_password }}\"",
"WORDPRESS_DB_NAME: \"{{ connect_wordpress_maria_database_name }}\"",
"WORDPRESS_DB_NAME: \"{{ connect_wordpress_maria_database }}\"",
"WORDPRESS_DEBUG: \"{{ wordpress_debug | default(1) }}\"",
"WORDPRESS_DOMAIN: \"{{ wordpress_domain_external }}\"",
"WORDPRESS_DOMAIN: \"{{ http_s }}://{{ wordpress_base_url }}\"",
"WORDPRESS_CONFIG_EXTRA: |",
" define( 'WP_HOME', 'https://{{ wordpress_base_url }}' );",
" define( 'WP_SITEURL', 'https://{{ wordpress_base_url }}' );",
"AUTH_API: \"https://{{ shared_service_keycloak_hostname }}\"",
"RESOURCE_API: \"https://{{ stage_server_url_host }}\"",
"REALM_ID: \"{{ current_realm_name }}\"",
"REGISTRATION_ID: \"{{ wordpress_oidc_client_id }}\"",
"CLIENT_ID: \"{{ wordpress_oidc_client_id }}\"",
"CLIENT_SECRET: \"{{ wordpress_oidc_client_secret }}\"",
"CLIENT_USERNAME: \"{{ wordpress_buergerportal_username }}\"",
"CLIENT_PASSWORD: \"{{ wordpress_buergerportal_password }}\"",
"SK_NRW_ISSUER: \"{{ sk_nrw_issuer }}\"",
"SK_NRW_PROVIDER_URL: \"{{ sk_nrw_provider_url }}\"",
"SK_NRW_CLIENT_ID: \"{{ sk_nrw_client_id }}\"",
"SK_NRW_CLIENT_SECRET: \"{{ sk_nrw_client_secret }}\"",
"RESOURCE_API: \"https://{{ stage_server_domain }}\"",
"REALM_ID: \"{{ current_realm_name | default('none') }}\"",
"REGISTRATION_ID: \"{{ wordpress_oidc_client_id | default('none') }}\"",
"CLIENT_ID: \"{{ wordpress_oidc_client_id | default('none') }}\"",
"CLIENT_SECRET: \"{{ wordpress_oidc_client_secret | default('none') }}\"",
"CLIENT_USERNAME: \"{{ wordpress_buergerportal_username | default('none') }}\"",
"CLIENT_PASSWORD: \"{{ wordpress_buergerportal_password | default('none') }}\"",
"SK_NRW_ISSUER: \"{{ sk_nrw_issuer | default('none') }}\"",
"SK_NRW_PROVIDER_URL: \"{{ sk_nrw_provider_url | default('none') }}\"",
"SK_NRW_CLIENT_ID: \"{{ sk_nrw_client_id | default('none') }}\"",
"SK_NRW_CLIENT_SECRET: \"{{ sk_nrw_client_secret | default('none') }}\"",
"SMARDIGO_AUTH_TOKEN_NAME: \"{{ smardigo_auth_token_name }}\"",
"SMARDIGO_AUTH_TOKEN_VALUE: \"{{ smardigo_auth_token_value }}\"",
],

@ -7,5 +7,5 @@ connect_admin_username: "connect-admin"
connect_admin_password: "connect-admin"
connect_mail_host: "{{ shared_service_mail_hostname }}"
connect_mail_properties_base_url: "{{ http_s }}://{{ stage_server_url_host }}"
connect_mail_properties_base_url_extern: "{{ http_s }}://{{ stage_server_url_host }}"
connect_mail_properties_base_url: "{{ http_s }}://{{ stage_server_domain }}"
connect_mail_properties_base_url_extern: "{{ http_s }}://{{ stage_server_domain }}"

@ -22,7 +22,7 @@
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}"
record_name: "{{ connect_id }}"
- name: "Check if {{ connect_id }}/docker-compose.yml exists"
stat:
@ -88,23 +88,6 @@
tags:
- update_deployment
- name: "Update landing page entries for {{ connect_id }}"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "{{ connect_id }}",
current_url: "{{ http_s }}://{{ connect_id }}.{{ domain }}",
current_version: "{{ connect_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
management: "{{ http_s }}://{{ connect_id }}.{{ domain }}:{{ monitor_port_service }}/management",
},
]
tags:
- update_deployment
- name: "Send mattermost messsge"
uri:
url: "{{ mattermost_hook_smardigo }}"

@ -1,18 +1,16 @@
---
connect_id: "{{ service_name }}-connect"
connect_labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ connect_id }}.service={{ connect_id }}"',
'"traefik.http.routers.{{ connect_id }}.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ connect_id }}.rule=Host(`{{ connect_base_url }}`)"',
'"traefik.http.routers.{{ connect_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ connect_id }}.tls=true"',
'"traefik.http.routers.{{ connect_id }}.tls.certresolver=letsencrypt"',
'"traefik.http.services.{{ connect_id }}.loadbalancer.server.port={{ service_port }}"',
'"traefik.http.routers.{{ connect_id }}-admin.service={{ connect_id }}-admin"',
'"traefik.http.routers.{{ connect_id }}-admin.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ connect_id }}-admin.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ connect_id }}-admin.entrypoints=admin-service"',
'"traefik.http.routers.{{ connect_id }}-admin.tls=true"',
'"traefik.http.routers.{{ connect_id }}-admin.tls.certresolver=letsencrypt"',
@ -22,8 +20,8 @@ connect_labels: [
'"traefik.http.middlewares.{{ connect_id }}-admin-cors.headers.accesscontrolallowheaders=SMA_USER"',
'"traefik.http.services.{{ connect_id }}-admin.loadbalancer.server.port={{ management_port }}"',
'"traefik.http.routers.{{ connect_id }}-monitor.service={{ service_name }}-node-exporter"',
'"traefik.http.routers.{{ connect_id }}-monitor.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ connect_id }}-monitor.service={{ inventory_hostname }}-node-exporter"',
'"traefik.http.routers.{{ connect_id }}-monitor.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ connect_id }}-monitor.entrypoints=monitoring-system"',
'"traefik.http.routers.{{ connect_id }}-monitor.tls=true"',
'"traefik.http.routers.{{ connect_id }}-monitor.tls.certresolver=letsencrypt"',
@ -41,9 +39,9 @@ connect_environment: [
"SPRING_PROFILES_INCLUDE: \"{{ spring_profiles_include | default('swagger') }}\"",
"RIBBON_DISPLAY_ON_ACTIVE_PROFILES: \"{{ ribbon_display_on_active_profiles | default('dev') }}\"",
"DATASOURCE_URL: \"jdbc:postgresql://{{ connect_postgres_host }}:{{ service_port_postgres }}/{{ connect_postgres_database_name }}\"",
"DATASOURCE_USERNAME: \"{{ connect_postgres_admin_username }}\"",
"DATASOURCE_PASSWORD: \"{{ connect_postgres_admin_password }}\"",
"DATASOURCE_URL: \"jdbc:postgresql://{{ connect_postgres_host }}:{{ service_port_postgres }}/{{ connect_postgres_database }}\"",
"DATASOURCE_USERNAME: \"{{ connect_postgres_username }}\"",
"DATASOURCE_PASSWORD: \"{{ connect_postgres_password }}\"",
"FILE_WHITELIST_URL: \"{{ connect_whitelist_url | default('') }}\"",
"MAIL_PROTOCOL: \"{{ connect_mail_protocol | default('smtp') }}\"",
@ -81,7 +79,7 @@ connect_environment: [
"ELASTIC_ANALYSIS_INDEX: \"{{ connect_elastic_analysis_index | default('analysis') }}\"",
"SMA_ENABLE_WEBDAV_DOC_EDITING: \"true\"",
"SMA_WEBDAV_BASE_PATH: \"{{ http_s }}://{{ stage_server_url_host }}\"",
"SMA_WEBDAV_BASE_PATH: \"{{ http_s }}://{{ stage_server_domain }}\"",
"SMA_WEBDAV_HOST_URL: \"{{ http_s }}://{{ shared_service_webdav_hostname }}/\"",
"SMA_WEBDAV_FRONTEND_URL: \"{{ http_s }}://{{ shared_service_webdav_hostname }}/\"",
"SMA_WEBDAV_JWT_SECRET: \"{{ webdav_jwt_secret }}\"",

@ -1,14 +1,14 @@
---
elastic_id: "{{ service_name }}-elastic"
kibana_id: "{{ service_name }}-kibana"
logstash_id: "{{ service_name }}-logstash"
elastic_exporter_id: "{{ service_name }}-elastic-exporter"
elastic_id: "{{ inventory_hostname }}-elastic"
kibana_id: "{{ inventory_hostname }}-kibana"
logstash_id: "{{ inventory_hostname }}-logstash"
elastic_exporter_id: "{{ inventory_hostname }}-elastic-exporter"
kibana_labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ kibana_id }}.service={{ kibana_id }}"',
'"traefik.http.routers.{{ kibana_id }}.rule=Host(`{{ stage_server_name }}-kibana.{{ domain }}`)"',
'"traefik.http.routers.{{ kibana_id }}.rule=Host(`{{ inventory_hostname }}-kibana.{{ domain }}`)"',
'"traefik.http.routers.{{ kibana_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ kibana_id }}.tls=true"',
'"traefik.http.routers.{{ kibana_id }}.tls.certresolver=letsencrypt"',
@ -104,7 +104,7 @@ elastic_docker: {
labels: "{{ kibana_labels + ( kibana_labels_additional | default([])) }}",
environment: [
"SERVER_NAME: {{ kibana_id }}",
"SERVER_PUBLICBASEURL: https://{{ stage_server_name }}-kibana.{{ domain }}",
"SERVER_PUBLICBASEURL: https://{{ inventory_hostname }}-kibana.{{ domain }}",
"ELASTICSEARCH_URL: https://{{ elastic_id }}:9200",
"ELASTICSEARCH_HOSTS: '[\"https://{{ elastic_id }}:9200\"]'",
"ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES: /usr/share/elasticsearch/config/certificates/ca/ca.crt",
@ -162,7 +162,7 @@ elastic_docker: {
labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ elastic_exporter_id }}.service={{ elastic_exporter_id }}"',
'"traefik.http.routers.{{ elastic_exporter_id }}.rule=Host(`{{ service_name }}.{{ domain }}`)"',
'"traefik.http.routers.{{ elastic_exporter_id }}.rule=Host(`{{ inventory_hostname }}.{{ domain }}`)"',
'"traefik.http.routers.{{ elastic_exporter_id }}.entrypoints=monitoring-docker"',
'"traefik.http.routers.{{ elastic_exporter_id }}.tls=true"',
'"traefik.http.routers.{{ elastic_exporter_id }}.tls.certresolver=letsencrypt"',

@ -1,6 +1,6 @@
---
filebeat_id: "{{ service_name }}-filebeat"
filebeat_id: "{{ inventory_hostname }}-filebeat"
filebeat_docker: {
services: [

@ -15,25 +15,25 @@
when:
- send_status_messages
- name: "Setup DNS configuration for {{ service_name }} harbor"
- name: "Setup DNS configuration for {{ inventory_hostname }} harbor"
include_role:
name: _digitalocean
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}"
record_name: "{{ inventory_hostname }}"
- name: 'Ensures {{ service_base_path }}/{{ service_name }} directory exists'
- name: 'Ensures {{ service_base_path }}/{{ inventory_hostname }} directory exists'
file:
state: directory
path: '{{ service_base_path }}/{{ service_name }}'
path: '{{ service_base_path }}/{{ inventory_hostname }}'
tags:
- update_deployment
- update_config
- name: 'Ensure directory structure for harbor exists'
file:
path: "{{ service_base_path }}/{{ service_name }}/{{ item.path }}"
path: "{{ service_base_path }}/{{ inventory_hostname }}/{{ item.path }}"
state: directory
owner: "{{ docker_owner }}"
group: "{{ docker_group }}"
@ -46,7 +46,7 @@
- name: Ensure config template files are populated from templates/harbor
template:
src: "{{ item.src }}"
dest: "{{ service_base_path }}/{{ service_name }}/{{ item.path | regex_replace('\\.j2$', '') }}"
dest: "{{ service_base_path }}/{{ inventory_hostname }}/{{ item.path | regex_replace('\\.j2$', '') }}"
owner: "{{ docker_owner }}"
group: "{{ docker_group }}"
mode: 0644
@ -58,7 +58,7 @@
- name: Ensure config files are populated from from templates/harbor
copy:
src: "{{ item.src }}"
dest: "{{ service_base_path }}/{{ service_name }}/{{ item.path }}"
dest: "{{ service_base_path }}/{{ inventory_hostname }}/{{ item.path }}"
owner: "{{ docker_owner }}"
group: "{{ docker_group }}"
mode: 0644
@ -70,25 +70,25 @@
- name: Download harbor offline installer
get_url:
url: https://github.com/goharbor/harbor/releases/download/{{ harbor_version }}/harbor-offline-installer-{{ harbor_version }}.tgz
dest: "{{ service_base_path }}/{{ service_name }}/harbor-offline-installer-{{ harbor_version }}.tgz"
dest: "{{ service_base_path }}/{{ inventory_hostname }}/harbor-offline-installer-{{ harbor_version }}.tgz"
- name: Extract harbor-offline-installer-{{ harbor_version }}.tgz into {{ service_base_path }}/{{ service_name }}
- name: Extract harbor-offline-installer-{{ harbor_version }}.tgz into {{ service_base_path }}/{{ inventory_hostname }}
ansible.builtin.unarchive:
src: "{{ service_base_path }}/{{ service_name }}/harbor-offline-installer-{{ harbor_version }}.tgz"
dest: "{{ service_base_path }}/{{ service_name }}"
src: "{{ service_base_path }}/{{ inventory_hostname }}/harbor-offline-installer-{{ harbor_version }}.tgz"
dest: "{{ service_base_path }}/{{ inventory_hostname }}"
remote_src: yes
- name: "Check if {{ service_name }}/harbor/docker-compose.yml exists"
- name: "Check if {{ inventory_hostname }}/harbor/docker-compose.yml exists"
stat:
path: '{{ service_base_path }}/harbor/{{ service_name }}/docker-compose.yml'
path: '{{ service_base_path }}/harbor/{{ inventory_hostname }}/docker-compose.yml'
register: check_docker_compose_file
tags:
- update_deployment
- name: "Stop {{ service_name }}"
- name: "Stop {{ inventory_hostname }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ service_name }}/harbor'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}/harbor'
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:

@ -17,15 +17,15 @@
when:
- send_status_messages
- name: "Setup DNS configuration for {{ service_name }}"
- name: "Setup DNS configuration for {{ inventory_hostname }}"
include_role:
name: _digitalocean
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}"
record_name: "{{ inventory_hostname }}"
- name: "Setup public DNS configuration for {{ service_name }}"
- name: "Setup public DNS configuration for {{ inventory_hostname }}"
include_role:
name: _digitalocean
tasks_from: domain
@ -40,72 +40,56 @@
name: _docker
tasks_from: networks
- name: "Check if {{ service_name }}/docker-compose.yml exists"
- name: "Check if {{ inventory_hostname }}/docker-compose.yml exists"
stat:
path: '{{ service_base_path }}/{{ service_name }}/docker-compose.yml'
path: '{{ service_base_path }}/{{ inventory_hostname }}/docker-compose.yml'
register: check_docker_compose_file
tags:
- update_deployment
- name: "Stop {{ service_name }}"
- name: "Stop {{ inventory_hostname }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:
- update_deployment
- name: "Deploy docker templates for {{ service_name }}"
- name: "Deploy docker templates for {{ inventory_hostname }}"
include_role:
name: _deploy
tasks_from: templates
vars:
current_config: "_docker"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_destination: "{{ inventory_hostname }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
current_docker: "{{ iam_docker }}"
- name: "Deploy service templates for {{ service_name }}"
- name: "Deploy service templates for {{ inventory_hostname }}"
include_role:
name: _deploy
tasks_from: templates
vars:
current_config: "iam"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_destination: "{{ inventory_hostname }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
- name: "Update {{ service_name }}"
- name: "Update {{ inventory_hostname }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
tags:
- update_deployment
- name: "Start {{ service_name }}"
- name: "Start {{ inventory_hostname }}"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
tags:
- update_deployment
- name: "Update landing page for {{ service_name }}"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "{{ service_name }}",
current_url: "{{ http_s }}://{{ iam_id }}.{{ domain }}",
current_version: "{{ iam_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
},
]
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
tags:
- update_deployment

@ -1,6 +1,6 @@
---
iam_id: "{{ service_name }}-iam"
iam_id: "{{ inventory_hostname }}-iam"
iam_cache_timeout: 600s
@ -11,14 +11,14 @@ iam_keycloak_admin_password: "{{ keycloak_admin_password }}"
iam_labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ iam_id }}.service={{ iam_id }}"',
'"traefik.http.routers.{{ iam_id }}.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ iam_id }}.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ iam_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ iam_id }}.tls=true"',
'"traefik.http.routers.{{ iam_id }}.tls.certresolver=letsencrypt"',
'"traefik.http.services.{{ iam_id }}.loadbalancer.server.port={{ service_port }}"',
'"traefik.http.routers.{{ iam_id }}-admin.service={{ iam_id }}-admin"',
'"traefik.http.routers.{{ iam_id }}-admin.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ iam_id }}-admin.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ iam_id }}-admin.entrypoints=admin-service"',
'"traefik.http.routers.{{ iam_id }}-admin.tls=true"',
'"traefik.http.routers.{{ iam_id }}-admin.tls.certresolver=letsencrypt"',
@ -28,8 +28,8 @@ iam_labels: [
'"traefik.http.middlewares.{{ iam_id }}-admin-cors.headers.accesscontrolallowheaders=SMA_USER"',
'"traefik.http.services.{{ iam_id }}-admin.loadbalancer.server.port={{ management_port }}"',
'"traefik.http.routers.{{ iam_id }}-monitor.service={{ service_name }}-node-exporter"',
'"traefik.http.routers.{{ iam_id }}-monitor.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ iam_id }}-monitor.service={{ inventory_hostname }}-node-exporter"',
'"traefik.http.routers.{{ iam_id }}-monitor.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ iam_id }}-monitor.entrypoints=monitoring-system"',
'"traefik.http.routers.{{ iam_id }}-monitor.tls=true"',
'"traefik.http.routers.{{ iam_id }}-monitor.tls.certresolver=letsencrypt"',

@ -1,3 +1,3 @@
---
upload_directory: /tmp
upload_directory: /tmp

@ -0,0 +1,6 @@
---
postgres_acls:
- name: "{{ keycloak_postgres_database }}"
password: "{{ keycloak_postgres_password }}"
trusted_cidr_entry: "{{ shared_service_network }}"

@ -0,0 +1,35 @@
---
### tags:
### update_deployment
- name: "Send mattermost message"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-start.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages
- name: "Setup postgres for {{ inventory_hostname }}"
include_role:
name: postgres
tasks_from: _postgres-acls
- name: "Send mattermost messsge"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-end.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages

@ -6,8 +6,3 @@ service_port_keycloak_external: "8110"
keycloak_version: "12.0.4"
keycloak_admin_username: "keycloak-admin"
keycloak_admin_password: "keycloak-admin"
keycloak_postgres_version: "12"
keycloak_postgres_database: "keycloak-postgres"
keycloak_postgres_admin_username: "keycloak-postgres-admin"
keycloak_postgres_admin_password: "keycloak-postgres-admin"

@ -6,7 +6,7 @@
when:
- debug
- name: Create client {{ client_id }} for realm {{ realm_name }}
- name: "Creating client <{{ client_id }}> for realm <{{ realm_name }}>"
uri:
url: "{{ keycloak_server_url }}/auth/admin/realms/{{ realm_name }}/clients"
method: POST
@ -15,5 +15,6 @@
headers:
Authorization: "Bearer {{ access_token}} "
status_code: [201]
changed_when: True
when: realm_client_ids | selectattr('clientId', 'equalto', client_id) | list | length == 0
delegate_to: 127.0.0.1

@ -19,80 +19,64 @@
when:
- send_status_messages
- name: "Setup DNS configuration for {{ service_name }}"
- name: "Setup DNS configuration for {{ inventory_hostname }}"
include_role:
name: _digitalocean
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}"
record_name: "{{ inventory_hostname }}"
- name: "Check if {{ service_name }}/docker-compose.yml exists"
- name: "Check if {{ inventory_hostname }}/docker-compose.yml exists"
stat:
path: '{{ service_base_path }}/{{ service_name }}/docker-compose.yml'
path: '{{ service_base_path }}/{{ inventory_hostname }}/docker-compose.yml'
register: check_docker_compose_file
tags:
- update_deployment
- name: "Stop {{ service_name }}"
- name: "Stop {{ inventory_hostname }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:
- update_deployment
- name: "Deploy docker templates for {{ service_name }}"
- name: "Deploy docker templates for {{ inventory_hostname }}"
include_role:
name: _deploy
tasks_from: templates
vars:
current_config: "_docker"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_destination: "{{ inventory_hostname }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
current_docker: "{{ keycloak_docker }}"
- name: "Deploy service templates for {{ service_name }}"
- name: "Deploy service templates for {{ inventory_hostname }}"
include_role:
name: _deploy
tasks_from: templates
vars:
current_config: "keycloak"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_destination: "{{ inventory_hostname }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
- name: "Update {{ service_name }}"
- name: "Update {{ inventory_hostname }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
tags:
- update_deployment
- name: "Start {{ service_name }}"
- name: "Start {{ inventory_hostname }}"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
tags:
- update_deployment
- name: "Update landing page for {{ service_name }}"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "{{ service_name }}",
current_url: "{{ http_s }}://{{ keycloak_id }}.{{ domain }}",
current_version: "{{ keycloak_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
},
]
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
tags:
- update_deployment

@ -96,7 +96,7 @@
},
"smtpServer": {
"host": "{{ shared_service_mail_hostname }}",
"from": "{{ service_name }}@{{ shared_service_mail_hostname }}"
"from": "{{ inventory_hostname }}@{{ shared_service_mail_hostname }}"
},
"loginTheme": "smardigo-theme",
"accountTheme": "smardigo-theme",

@ -1,19 +1,19 @@
---
keycloak_id: "{{ service_name }}-keycloak"
keycloak_postgres_id: "{{ service_name }}-postgres-keycloak"
keycloak_id: "{{ inventory_hostname }}-keycloak"
keycloak_postgres_id: "{{ inventory_hostname }}-postgres-keycloak"
keycloak_labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ keycloak_id }}.service={{ keycloak_id }}"',
'"traefik.http.routers.{{ keycloak_id }}.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ keycloak_id }}.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ keycloak_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ keycloak_id }}.tls=true"',
'"traefik.http.routers.{{ keycloak_id }}.tls.certresolver=letsencrypt"',
'"traefik.http.services.{{ keycloak_id }}.loadbalancer.server.port={{ service_port }}"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.service={{ service_name }}-node-exporter"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.service={{ inventory_hostname }}-node-exporter"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.entrypoints=monitoring-system"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.tls=true"',
'"traefik.http.routers.{{ keycloak_id }}-monitor.tls.certresolver=letsencrypt"',
@ -21,20 +21,11 @@ keycloak_labels: [
keycloak_docker: {
networks: [
{
name: back-tier,
external: true,
},
{
name: front-tier,
external: true,
},
],
volumes: [
{
name: "{{ keycloak_postgres_id }}-data"
}
],
services: [
{
name: "{{ keycloak_id }}",
@ -49,9 +40,9 @@ keycloak_docker: {
"DB_VENDOR: postgres",
"DB_DATABASE: \"{{ keycloak_postgres_database }}\"",
"DB_USER: \"{{ keycloak_postgres_admin_username }}\"",
"DB_PASSWORD: \"{{ keycloak_postgres_admin_password }}\"",
"DB_ADDR: \"{{ keycloak_postgres_id }}\"",
"DB_USER: \"{{ keycloak_postgres_username }}\"",
"DB_PASSWORD: \"{{ keycloak_postgres_password }}\"",
"DB_ADDR: \"{{ keycloak_postgres_host }}\"",
"JAVA_OPTS_APPEND: \"-Dkeycloak.profile.feature.docker=enabled\"",
],
@ -60,7 +51,6 @@ keycloak_docker: {
'"./smardigo-theme:/opt/jboss/keycloak/themes/smardigo-theme:ro"',
],
networks: [
'"back-tier"',
'"front-tier"',
],
ports: [
@ -70,23 +60,6 @@ keycloak_docker: {
},
],
extra_hosts: "{{ keycloak_extra_hosts | default([]) }}",
},
{
name: "{{ keycloak_postgres_id }}",
image_name: "postgres",
image_version: "{{ keycloak_postgres_version }}",
environment: [
'POSTGRES_DB: "{{ keycloak_postgres_database }}"',
'POSTGRES_USER: "{{ keycloak_postgres_admin_username }}"',
'POSTGRES_PASSWORD: "{{ keycloak_postgres_admin_password }}"',
],
volumes: [
'"{{ keycloak_postgres_id }}-data:/var/lib/postgresql/data"',
],
networks: [
'"back-tier"',
],
ports: "{{ keycloak_postgres_ports | default([]) }}",
},
}
],
}

@ -0,0 +1,6 @@
---
postgres_acls:
- name: "{{ management_connect_postgres_database }}"
password: "{{ management_connect_postgres_password }}"
trusted_cidr_entry: "{{ shared_service_network }}"

@ -0,0 +1,32 @@
---
- name: "Send mattermost message"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-start.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages
- name: "Setup postgres for {{ inventory_hostname }}"
include_role:
name: postgres
tasks_from: _postgres-acls
- name: "Send mattermost messsge"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-end.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages

@ -14,7 +14,7 @@
- send_status_messages
- name: Ensure MySQL databases are present.
mysql_db:
community.mysql.mysql_db:
name: "{{ item.name }}"
collation: "{{ item.collation | default('utf8_general_ci') }}"
encoding: "{{ item.encoding | default('utf8') }}"
@ -24,7 +24,7 @@
with_items: "{{ mysql_databases }}"
- name: Ensure MySQL users are present.
mysql_user:
community.mysql.mysql_user:
name: "{{ item.name }}"
password: "{{ item.password }}"
priv: "{{ item.priv | default('*.*:USAGE') }}"

@ -44,7 +44,7 @@
register: root_pwd_check
- name: Set MariaDB root password for the first time
mysql_user:
community.mysql.mysql_user:
name: root
password: "{{ mysql_root_password }}"
host_all: yes
@ -53,7 +53,7 @@
when: root_pwd_check.rc == 0
- name: Ensure MySQL databases are present.
mysql_db:
community.mysql.mysql_db:
name: "{{ item.name }}"
collation: "{{ item.collation | default('utf8_general_ci') }}"
encoding: "{{ item.encoding | default('utf8') }}"
@ -63,7 +63,7 @@
with_items: "{{ mysql_databases }}"
- name: Ensure MySQL users are present.
mysql_user:
community.mysql.mysql_user:
name: "{{ item.name }}"
password: "{{ item.password }}"
priv: "{{ item.priv | default('*.*:USAGE') }}"

@ -1,6 +1,6 @@
---
node_exporter_id: "{{ service_name }}-node-exporter"
node_exporter_id: "{{ inventory_hostname }}-node-exporter"
node_exporter_docker: {
networks: [
@ -46,7 +46,7 @@ node_exporter_docker: {
labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ node_exporter_id }}.service={{ node_exporter_id }}"',
'"traefik.http.routers.{{ node_exporter_id }}.rule=Host(`{{ service_name }}.{{ domain }}`)"',
'"traefik.http.routers.{{ node_exporter_id }}.rule=Host(`{{ inventory_hostname }}.{{ domain }}`)"',
'"traefik.http.routers.{{ node_exporter_id }}.entrypoints=monitoring-system"',
'"traefik.http.routers.{{ node_exporter_id }}.tls=true"',
'"traefik.http.routers.{{ node_exporter_id }}.tls.certresolver=letsencrypt"',

@ -1,6 +1,6 @@
---
pgadmin_id: "{{ service_name }}-pgadmin"
pgadmin_id: "{{ inventory_hostname }}-pgadmin"
pgadmin4_docker: {
networks: [
@ -26,7 +26,7 @@ pgadmin4_docker: {
labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ pgadmin_id }}.service={{ pgadmin_id }}"',
'"traefik.http.routers.{{ pgadmin_id }}.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ pgadmin_id }}.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ pgadmin_id }}.entrypoints=admin-postgres"',
'"traefik.http.routers.{{ pgadmin_id }}.tls=true"',
'"traefik.http.routers.{{ pgadmin_id }}.tls.certresolver=letsencrypt"',

@ -1,4 +0,0 @@
---
postgres_image_name: "postgres"
postgres_image_version: "12"

@ -1,83 +0,0 @@
---
### tags:
### update_deployment
- name: "Send mattermost message"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-start.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages
- name: "Check if {{ postgres_id }}/docker-compose.yml exists"
stat:
path: '{{ service_base_path }}/{{ postgres_id }}/docker-compose.yml'
register: check_docker_compose_file
tags:
- update_deployment
- name: "Stop {{ postgres_id }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ postgres_id }}'
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:
- update_deployment
- name: "Deploy docker templates for {{ postgres_id }}"
include_role:
name: _deploy
tasks_from: templates
vars:
current_config: "_docker"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ postgres_id }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
current_docker: "{{ postgres_docker }}"
- name: "Deploy service templates for {{ postgres_id }}"
include_role:
name: _deploy
tasks_from: templates
vars:
current_config: "connect"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ postgres_id }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
- name: "Update {{ postgres_id }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ postgres_id }}'
tags:
- update_deployment
- name: "Start {{ postgres_id }}"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ postgres_id }}'
tags:
- update_deployment
- name: "Send mattermost messsge"
uri:
url: "{{ mattermost_hook_smardigo }}"
method: POST
body: "{{ lookup('template','mattermost-deploy-end.json.j2') }}"
body_format: json
headers:
Content-Type: "application/json"
delegate_to: 127.0.0.1
become: false
when:
- send_status_messages

@ -1,36 +0,0 @@
---
postgres_id: "{{ service_name }}-postgres_{{ cluster_service }}"
postgres_docker: {
networks: [
{
name: back-tier,
external: true,
}
],
volumes: [
{
name: "{{ postgres_id }}-data"
}
],
services: [
{
name: "{{ postgres_id }}",
image_name: "{{ postgres_image_name }}",
image_version: "{{ postgres_image_version }}",
environment: [
"POSTGRES_DB: \"{{ hostvars[inventory_hostname][cluster_service + '_postgres_database'] | default('postgres') }}\"",
"POSTGRES_USER: \"{{ hostvars[inventory_hostname][cluster_service + '_postgres_admin_username'] | default('postgres-admin') }}\"",
"POSTGRES_PASSWORD: \"{{ hostvars[inventory_hostname][cluster_service + '_postgres_admin_password'] | default('postgres-admin') }}\"",
],
volumes: [
'"{{ postgres_id }}-data:/var/lib/postgresql/data"',
],
networks: [
'"back-tier"',
],
ports: "{{ postgres_ports | default([]) }}",
},
],
}

@ -17,55 +17,55 @@
when:
- send_status_messages
- name: "Setup DNS configuration for {{ service_name }} prometheus"
- name: "Setup DNS configuration for {{ inventory_hostname }} prometheus"
include_role:
name: _digitalocean
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}-prometheus"
record_name: "{{ inventory_hostname }}-prometheus"
- name: "Setup DNS configuration for {{ service_name }} grafana"
- name: "Setup DNS configuration for {{ inventory_hostname }} grafana"
include_role:
name: _digitalocean
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}-grafana"
record_name: "{{ inventory_hostname }}-grafana"
- name: "Setup DNS configuration for {{ service_name }} alertmanager"
- name: "Setup DNS configuration for {{ inventory_hostname }} alertmanager"
include_role:
name: _digitalocean
tasks_from: domain
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ service_name }}-alertmanager"
record_name: "{{ inventory_hostname }}-alertmanager"
- name: "Check if {{ service_name }}/docker-compose.yml exists"
- name: "Check if {{ inventory_hostname }}/docker-compose.yml exists"
stat:
path: '{{ service_base_path }}/{{ service_name }}/docker-compose.yml'
path: '{{ service_base_path }}/{{ inventory_hostname }}/docker-compose.yml'
register: check_docker_compose_file
tags:
- update_config
- update_deployment
- name: "Stop {{ service_name }}"
- name: "Stop {{ inventory_hostname }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:
- update_deployment
- name: "Deploy docker templates for {{ service_name }}"
- name: "Deploy docker templates for {{ inventory_hostname }}"
include_role:
name: _deploy
tasks_from: templates
vars:
current_config: "_docker"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_destination: "{{ inventory_hostname }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
current_docker: "{{ prometheus_docker }}"
@ -119,65 +119,39 @@
tags:
- update_config
- name: "Deploy service templates for {{ service_name }}"
- name: "Deploy service templates for {{ inventory_hostname }}"
include_role:
name: _deploy
tasks_from: templates
vars:
current_config: "prometheus"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_destination: "{{ inventory_hostname }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
tags:
- update_config
- name: "Update {{ service_name }}"
- name: "Update {{ inventory_hostname }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
tags:
- update_deployment
- name: "Start {{ service_name }}"
- name: "Start {{ inventory_hostname }}"
shell: |
docker-compose down
docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
tags:
- update_config
- update_deployment
- name: "Update landing page for {{ service_name }}"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "prometheus",
current_url: "{{ http_s}}://{{ service_name }}-prometheus.{{ domain }}",
current_version: "{{ prometheus_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
},
{
current_name: "grafana",
current_url: "{{ http_s }}://{{ service_name }}-grafana.{{ domain }}",
current_version: "{{ grafana_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
},
{
current_name: "alertmanager",
current_url: "{{ http_s }}://{{ service_name }}-alertmanager.{{ domain }}",
current_version: "{{ alertmanager_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
},
]
- name: "Wait for {{ http_s }}://{{ service_name }}-grafana.{{ domain }}"
- name: "Wait for {{ http_s }}://{{ inventory_hostname }}-grafana.{{ domain }}"
uri:
url: "{{ http_s }}://{{ service_name }}-grafana.{{ domain }}/api/admin/stats"
url: "{{ http_s }}://{{ inventory_hostname }}-grafana.{{ domain }}/api/admin/stats"
url_username: "{{ grafana_admin_username }}"
url_password: "{{ grafana_admin_password }}"
force_basic_auth: yes
@ -191,7 +165,7 @@
- name: Create grafana users
uri:
url: "{{ http_s }}://{{ service_name }}-grafana.{{ domain }}/api/admin/users"
url: "{{ http_s }}://{{ inventory_hostname }}-grafana.{{ domain }}/api/admin/users"
url_username: "{{ grafana_admin_username }}"
url_password: "{{ grafana_admin_password }}"
force_basic_auth: yes

@ -4,9 +4,9 @@ service_port_grafana: 3000
service_port_prometheus: 9090
service_port_alertmanager: 9093
prometheus_id: "{{ service_name }}-prometheus"
alertmanager_id: "{{ service_name }}-alertmanager"
grafana_id: "{{ service_name }}-grafana"
prometheus_id: "{{ inventory_hostname }}-prometheus"
alertmanager_id: "{{ inventory_hostname }}-alertmanager"
grafana_id: "{{ inventory_hostname }}-grafana"
prometheus_docker: {
networks: [
@ -38,7 +38,7 @@ prometheus_docker: {
labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ prometheus_id }}.service={{ prometheus_id }}"',
'"traefik.http.routers.{{ prometheus_id }}.rule=Host(`{{ service_name }}-prometheus.{{ domain }}`)"',
'"traefik.http.routers.{{ prometheus_id }}.rule=Host(`{{ inventory_hostname }}-prometheus.{{ domain }}`)"',
'"traefik.http.routers.{{ prometheus_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ prometheus_id }}.tls=true"',
'"traefik.http.routers.{{ prometheus_id }}.tls.certresolver=letsencrypt"',
@ -49,7 +49,7 @@ prometheus_docker: {
'"--storage.tsdb.path=/prometheus"',
'"--web.console.libraries=/usr/share/prometheus/console_libraries"',
'"--web.console.templates=/usr/share/prometheus/consoles"',
'"--web.external-url={{ http_s}}://{{ service_name }}-prometheus.{{ domain }}"',
'"--web.external-url={{ http_s }}://{{ inventory_hostname }}-prometheus.{{ domain }}"',
'"--web.enable-lifecycle"',
'"--storage.tsdb.retention.time=15w"',
],
@ -70,7 +70,7 @@ prometheus_docker: {
labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ alertmanager_id }}.service={{ alertmanager_id }}"',
'"traefik.http.routers.{{ alertmanager_id }}.rule=Host(`{{ service_name }}-alertmanager.{{ domain }}`)"',
'"traefik.http.routers.{{ alertmanager_id }}.rule=Host(`{{ inventory_hostname }}-alertmanager.{{ domain }}`)"',
'"traefik.http.routers.{{ alertmanager_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ alertmanager_id }}.tls=true"',
'"traefik.http.routers.{{ alertmanager_id }}.tls.certresolver=letsencrypt"',
@ -79,7 +79,7 @@ prometheus_docker: {
command: [
'"--config.file=/etc/alertmanager/config.yml"',
'"--storage.path=/alertmanager"',
'"--web.external-url={{ http_s}}://{{ service_name }}-alertmanager.{{ domain }}"',
'"--web.external-url={{ http_s }}://{{ inventory_hostname }}-alertmanager.{{ domain }}"',
],
environment: [
'LS_JAVA_OPTS: "-Xmx1G -Xms1G"',
@ -101,7 +101,7 @@ prometheus_docker: {
labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ grafana_id }}.service={{ grafana_id }}"',
'"traefik.http.routers.{{ grafana_id }}.rule=Host(`{{ service_name }}-grafana.{{ domain }}`)"',
'"traefik.http.routers.{{ grafana_id }}.rule=Host(`{{ inventory_hostname }}-grafana.{{ domain }}`)"',
'"traefik.http.routers.{{ grafana_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ grafana_id }}.tls=true"',
'"traefik.http.routers.{{ grafana_id }}.tls.certresolver=letsencrypt"',

@ -57,27 +57,6 @@
group: "{{ docker_group }}"
mode: '0600'
- name: "Update landing page"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: []
- name: "Update landing page for traefik"
include_role:
name: _deploy
tasks_from: caddy_landing_page
vars:
current_services: [
{
current_name: "traefik",
current_url: "{{ http_s }}://{{ stage_server_url_host }}:{{ admin_port_traefik }}",
current_version: "{{ traefik_image_version }}",
current_date: "{{ ansible_date_time.iso8601 }}",
},
]
- name: "Update traefik"
shell: docker-compose pull
args:

@ -1,6 +1,6 @@
---
traefik_id: "{{ service_name }}-traefik"
traefik_id: "{{ inventory_hostname }}-traefik"
traefik_docker: {
networks: [

@ -1,6 +1,6 @@
---
postgres_acls:
- name: "{{ webdav_postgres_database_name }}"
password: "{{ webdav_postgres_admin_password }}"
- name: "{{ webdav_postgres_database }}"
password: "{{ webdav_postgres_password }}"
trusted_cidr_entry: "{{ shared_service_network }}"

@ -16,7 +16,7 @@
when:
- send_status_messages
- name: "Setup postgres for {{ service_name }}"
- name: "Setup postgres for {{ inventory_hostname }}"
include_role:
name: postgres
tasks_from: _postgres-acls

@ -15,24 +15,24 @@
- name: "Check if webdav/docker-compose.yml exists"
stat:
path: '{{ service_base_path }}/{{ service_name }}/docker-compose.yml'
path: '{{ service_base_path }}/{{ inventory_hostname }}/docker-compose.yml'
register: check_docker_compose_file
- name: "Stop webdav"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
when: check_docker_compose_file.stat.exists
ignore_errors: yes
- name: "Deploy docker templates for {{ service_name }}"
- name: "Deploy docker templates for {{ inventory_hostname }}"
include_role:
name: _deploy
tasks_from: templates
vars:
current_config: "_docker"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_destination: "{{ inventory_hostname }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
current_docker: "{{ webdav_docker }}"
@ -44,21 +44,21 @@
vars:
current_config: "webdav"
current_base_path: "{{ service_base_path }}"
current_destination: "{{ service_name }}"
current_destination: "{{ inventory_hostname }}"
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
- name: "Update {{ service_name }}"
- name: "Update {{ inventory_hostname }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
tags:
- update_deployment
- name: "Start {{ service_name }}"
- name: "Start {{ inventory_hostname }}"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ service_name }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
- name: "Send mattermost message"
uri:

@ -1,18 +1,18 @@
---
webdav_id: "{{ service_name }}-webdav"
webdav_id: "{{ inventory_hostname }}-webdav"
webdav_labels: [
'"traefik.enable=true"',
'"traefik.http.routers.{{ webdav_id }}.service={{ webdav_id }}"',
'"traefik.http.routers.{{ webdav_id }}.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ webdav_id }}.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ webdav_id }}.entrypoints=websecure"',
'"traefik.http.routers.{{ webdav_id }}.tls=true"',
'"traefik.http.routers.{{ webdav_id }}.tls.certresolver=letsencrypt"',
'"traefik.http.services.{{ webdav_id }}.loadbalancer.server.port={{ service_port_webdav }}"',
'"traefik.http.routers.{{ webdav_id }}-admin.service={{ webdav_id }}-admin"',
'"traefik.http.routers.{{ webdav_id }}-admin.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ webdav_id }}-admin.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ webdav_id }}-admin.entrypoints=admin-service"',
'"traefik.http.routers.{{ webdav_id }}-admin.tls=true"',
'"traefik.http.routers.{{ webdav_id }}-admin.tls.certresolver=letsencrypt"',
@ -22,8 +22,8 @@ webdav_labels: [
'"traefik.http.middlewares.{{ webdav_id }}-admin-cors.headers.accesscontrolallowheaders=SMA_USER"',
'"traefik.http.services.{{ webdav_id }}-admin.loadbalancer.server.port={{ management_port }}"',
'"traefik.http.routers.{{ webdav_id }}-monitor.service={{ service_name }}-node-exporter"',
'"traefik.http.routers.{{ webdav_id }}-monitor.rule=Host(`{{ stage_server_url_host }}`)"',
'"traefik.http.routers.{{ webdav_id }}-monitor.service={{ inventory_hostname }}-node-exporter"',
'"traefik.http.routers.{{ webdav_id }}-monitor.rule=Host(`{{ stage_server_domain }}`)"',
'"traefik.http.routers.{{ webdav_id }}-monitor.entrypoints=monitoring-system"',
'"traefik.http.routers.{{ webdav_id }}-monitor.tls=true"',
'"traefik.http.routers.{{ webdav_id }}-monitor.tls.certresolver=letsencrypt"',
@ -46,9 +46,9 @@ webdav_docker: {
user: root,
environment: [
"SPRING_PROFILES_INCLUDE: \"swagger,postgres\"",
"DATASOURCE_URL: \"jdbc:postgresql://{{ webdav_postgres_host }}:{{ service_port_postgres }}/{{ webdav_postgres_database_name }}\"",
"DATASOURCE_USERNAME: \"{{ webdav_postgres_admin_username }}\"",
"DATASOURCE_PASSWORD: \"{{ webdav_postgres_admin_password }}\"",
"DATASOURCE_URL: \"jdbc:postgresql://{{ webdav_postgres_host }}:{{ service_port_postgres }}/{{ webdav_postgres_database }}\"",
"DATASOURCE_USERNAME: \"{{ webdav_postgres_username }}\"",
"DATASOURCE_PASSWORD: \"{{ webdav_postgres_password }}\"",
"SMA_JWT_SECRET: \"{{ webdav_jwt_secret }}\""
],
networks: [

@ -6,7 +6,7 @@
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster)
# cluster_service := (service to setup, e.g. connect)
# cluster_services := (services to setup, e.g. ['connect', 'wordpress', ...])
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
@ -35,51 +35,9 @@
groups:
- "stage_{{ stage }}"
- "{{ cluster_service }}"
with_sequence: start=1 end={{ cluster_size | default(1) }}
with_sequence: start=1 end={{ cluster_count | default(1) }}
changed_when: False
#############################################################
# Setup databases for created inventory
#############################################################
- hosts: "stage_{{ stage }}"
serial: "{{ serial_number | default(1) }}"
remote_user: root
pre_tasks:
- name: "Gathering current server infos from hetzner"
hcloud_server_info:
api_token: "{{ hetzner_authentication_token }}"
register: hetzner_server_infos
delegate_to: 127.0.0.1
become: false
- name: "Setting current server infos as fact: hetzner_server_infos_json"
set_fact:
hetzner_server_infos_json: "{{ hetzner_server_infos.hcloud_server_info }}"
delegate_to: 127.0.0.1
become: false
- name: "Reading ip address for {{ inventory_hostname }}"
set_fact:
stage_server_ip: "{{ hetzner_server_infos_json | json_query(querystr) | first }}"
vars:
querystr: "[?name=='{{ inventory_hostname }}'].ipv4_address"
delegate_to: 127.0.0.1
become: false
- name: "Printing ip address for {{ inventory_hostname }}"
debug:
msg: "{{ stage_server_ip }}"
delegate_to: 127.0.0.1
become: false
when:
- debug
roles:
- role: postgres-container
when: "'postgres' in group_names"
#############################################################
# Sending smardigo management message to process
#############################################################
@ -101,6 +59,6 @@
Content-Type: "application/json"
Smardigo-User-Token: "{{ smardigo_management_token }}"
status_code: [200]
delegate_to: 127.0.0.1
retries: 5
delay: 5
delegate_to: 127.0.0.1

@ -7,71 +7,10 @@
"menuItems" : [ {
"name" : "Provisioning",
"tabName" : "Provisioning",
"logoId" : "cube",
"logoId" : "local_shipping",
"configType" : "process-search",
"processDefinitionKey" : "simple-connect",
"processDefinitionKeys" : [ "simple-connect" ],
"groups" : [ "user", "head", "maintainer", "administrator" ]
}, {
"name" : "navbar.workflow",
"logoId" : "cogs",
"configType" : "menu",
"groups" : [ "administrator" ],
"items" : [ {
"name" : "navbar.workflow.scope_tags",
"logoId" : "cogs",
"configType" : "process-config",
"configKey" : "scope-tags"
}, {
"name" : "navbar.workflow.processes",
"logoId" : "cogs",
"configType" : "process-config",
"configKey" : "processes"
}, {
"name" : "navbar.workflow.process_searches",
"logoId" : "search",
"configType" : "process-config",
"configKey" : "process-searches"
}, {
"name" : "navbar.workflow.process_variable_declarations",
"logoId" : "file-code-o",
"configType" : "process-config",
"configKey" : "process-variable-declarations"
}, {
"name" : "navbar.workflow.layouts",
"logoId" : "newspaper-o",
"configType" : "process-config",
"configKey" : "layouts"
}, {
"name" : "navbar.workflow.forms",
"logoId" : "list-alt",
"configType" : "process-config",
"configKey" : "forms"
}, {
"name" : "navbar.workflow.datasources",
"logoId" : "fw fa-database",
"configType" : "process-config",
"configKey" : "datasources"
}, {
"name" : "navbar.workflow.mail_templates",
"logoId" : "envelope",
"configType" : "process-config",
"configKey" : "mail-templates"
}, {
"name" : "navbar.workflow.process_presets",
"logoId" : "book",
"configType" : "process-config",
"configKey" : "process-presets"
}, {
"name" : "navbar.workflow.document_templates",
"logoId" : "cubes",
"configType" : "process-config",
"configKey" : "document-templates"
}, {
"name" : "config.type.name2",
"logoId" : "edit",
"configType" : "process-config",
"configKey" : "configs"
} ]
} ]
}

@ -0,0 +1,31 @@
{
"name" : "connect-features",
"restApi" : true,
"configKey" : "connect-features",
"payloadType" : "EXCEL",
"config" : [ {
"name" : "file",
"type" : "FILE",
"value" : "connect-features.xlsx"
}, {
"name" : "columnNames",
"type" : "STRING",
"value" : ""
}, {
"name" : "sqlStatement",
"type" : "STRING",
"value" : "select * from features"
}, {
"name" : "columnNameLineNumber",
"type" : "INT",
"value" : 1
}, {
"name" : "skipEmptyLines",
"type" : "BOOLEAN",
"value" : false
}, {
"name" : "skipEmptyColumns",
"type" : "BOOLEAN",
"value" : false
} ]
}

File diff suppressed because it is too large Load Diff

@ -3,6 +3,79 @@
"configKey" : "simple-connect",
"page" : 0,
"components" : [ {
"label" : "Optionale Features",
"mask" : false,
"tableView" : true,
"alwaysEnabled" : false,
"type" : "dualmultiselect",
"input" : true,
"key" : "connect-features",
"defaultValue" : [ ],
"validate" : {
"customMessage" : "",
"json" : "",
"required" : false,
"custom" : "",
"customPrivate" : false
},
"conditional" : {
"show" : "",
"when" : "",
"json" : "",
"eq" : ""
},
"data" : {
"url" : "api/v1/scopes/{{ context.scopeId }}/processes/{{ context.processId }}/datasources/connect-features/query",
"values" : [ { } ]
},
"template" : "{{ item.key }}",
"tabs" : null,
"properties" : { },
"tags" : [ ],
"encrypted" : false,
"customConditional" : "",
"logic" : [ ],
"projection" : "key",
"reorder" : false,
"placeholder" : "",
"prefix" : "",
"customClass" : "",
"suffix" : "",
"multiple" : false,
"protected" : false,
"unique" : false,
"persistent" : true,
"hidden" : false,
"clearOnHide" : true,
"dataGridLabel" : false,
"labelPosition" : "top",
"labelWidth" : 30,
"labelMargin" : 3,
"description" : "",
"errorLabel" : "",
"tooltip" : "",
"hideLabel" : false,
"tabindex" : "",
"disabled" : false,
"autofocus" : false,
"dbIndex" : false,
"customDefaultValue" : "",
"calculateValue" : "",
"allowCalculateOverride" : false,
"widget" : null,
"refreshOn" : "",
"clearOnRefresh" : false,
"validateOn" : "change",
"dataSrc" : "url",
"filter" : true,
"filterPlaceholder" : "Tippen um zu filtern.",
"labelAll" : "Alle Werte",
"labelSelected" : "Ausgewählte Werte",
"buttonSelectAll" : "Alle auswählen",
"buttonDeselectAll" : "Alle abwählen",
"selectAll" : false,
"id" : "ecfbj1k"
}, {
"label" : "Cluster",
"mask" : false,
"tableView" : true,
@ -111,7 +184,7 @@
"mask" : false,
"inputType" : "text",
"inputMask" : "",
"id" : "eigbtac"
"id" : "e757btk"
}, {
"label" : "Name",
"labelPosition" : "left-left",
@ -186,7 +259,7 @@
"mask" : false,
"inputType" : "text",
"inputMask" : "",
"id" : "e4a12rg"
"id" : "e31r2lu8"
}, {
"label" : "Size",
"labelPosition" : "left-left",
@ -252,7 +325,7 @@
"refreshOn" : "",
"clearOnRefresh" : false,
"validateOn" : "change",
"id" : "er1boyl"
"id" : "esx4ewv"
}, {
"label" : "Service",
"labelPosition" : "left-left",
@ -327,7 +400,7 @@
"mask" : false,
"inputType" : "text",
"inputMask" : "",
"id" : "exyw29"
"id" : "e8wrmxk"
} ],
"tabs" : null,
"reorder" : false,
@ -370,7 +443,7 @@
"custom" : "",
"customPrivate" : false
},
"id" : "e6x001"
"id" : "etkc4bga"
} ],
"tabs" : null,
"encrypted" : false,
@ -410,7 +483,7 @@
"clearOnRefresh" : false,
"validateOn" : "change",
"tree" : true,
"id" : "eon3nm"
"id" : "eqym18"
}, {
"label" : "progress_current",
"labelPosition" : "left-left",
@ -476,7 +549,7 @@
"refreshOn" : "",
"clearOnRefresh" : false,
"validateOn" : "change",
"id" : "ek7v2m"
"id" : "eofa72"
}, {
"label" : "progress_max",
"labelPosition" : "left-left",
@ -542,7 +615,7 @@
"refreshOn" : "",
"clearOnRefresh" : false,
"validateOn" : "change",
"id" : "ern7sil"
"id" : "ekfiepc"
}, {
"label" : "HTML",
"labelPosition" : "left-left",
@ -609,7 +682,7 @@
"clearOnRefresh" : false,
"validateOn" : "change",
"tag" : "p",
"id" : "eismamu"
"id" : "euz92uc"
}, {
"label" : "HTML",
"labelPosition" : "left-left",
@ -618,7 +691,7 @@
"attr" : "",
"value" : ""
} ],
"content" : "<div class=\"h3\">\n <a\n target=\"_blank\"\n href=\"{{ 'https://' + (!!(data.cluster)?data.cluster.stage:'cluster.stage') + '-' + (!!(data.tenant)?data.tenant.key:'tenant.key') + '-' + (!!(data.cluster)?data.cluster.name:'cluster.name') + '-01.smardigo.digital' }}\">\n {{ 'https://' + (!!(data.cluster)?data.cluster.stage:'cluster.stage') + '-' + (!!(data.tenant)?data.tenant.key:'tenant.key') + '-' + (!!(data.cluster)?data.cluster.name:'cluster.name') + '-01.smardigo.digital' }}\n </a>\n</div>",
"content" : "<div class=\"h3\">\n <a\n target=\"_blank\"\n href=\"{{ 'https://' + (!!(data.cluster)?data.cluster.stage:'cluster.stage') + '-' + (!!(data.tenant)?data.tenant.key:'tenant.key') + '-' + (!!(data.cluster)?data.cluster.name:'cluster.name') + '-01-connect.smardigo.digital' }}\">\n {{ 'https://' + (!!(data.cluster)?data.cluster.stage:'cluster.stage') + '-' + (!!(data.tenant)?data.tenant.key:'tenant.key') + '-' + (!!(data.cluster)?data.cluster.name:'cluster.name') + '-01-connect.smardigo.digital' }}\n </a>\n</div>",
"refreshOnChange" : true,
"mask" : false,
"tableView" : true,
@ -676,6 +749,140 @@
"clearOnRefresh" : false,
"validateOn" : "change",
"tag" : "p",
"id" : "e6jdzxn"
"id" : "edletnj"
}, {
"label" : "HTML",
"labelPosition" : "left-left",
"className" : "",
"attrs" : [ {
"attr" : "",
"value" : ""
} ],
"content" : "<div class=\"h3\">\n <a\n target=\"_blank\"\n href=\"{{ 'https://' + (!!(data.cluster)?data.cluster.stage:'cluster.stage') + '-' + (!!(data.tenant)?data.tenant.key:'tenant.key') + '-' + (!!(data.cluster)?data.cluster.name:'cluster.name') + '-01-wordpress.smardigo.digital' }}\">\n {{ 'https://' + (!!(data.cluster)?data.cluster.stage:'cluster.stage') + '-' + (!!(data.tenant)?data.tenant.key:'tenant.key') + '-' + (!!(data.cluster)?data.cluster.name:'cluster.name') + '-01-wordpress.smardigo.digital' }}\n </a>\n</div>",
"refreshOnChange" : true,
"mask" : false,
"tableView" : true,
"alwaysEnabled" : false,
"type" : "htmlelement",
"input" : false,
"key" : "html4",
"validate" : {
"customMessage" : "",
"json" : "",
"required" : false,
"custom" : "",
"customPrivate" : false
},
"conditional" : {
"show" : "",
"when" : "",
"json" : "",
"eq" : ""
},
"tabs" : null,
"encrypted" : false,
"properties" : { },
"tags" : null,
"customConditional" : "show = data['connect-features'].includes(\"wordpress\") ",
"logic" : [ ],
"refreshOn" : "data",
"reorder" : false,
"placeholder" : "",
"prefix" : "",
"customClass" : "",
"suffix" : "",
"multiple" : false,
"defaultValue" : null,
"protected" : false,
"unique" : false,
"persistent" : false,
"hidden" : false,
"clearOnHide" : true,
"dataGridLabel" : false,
"labelWidth" : 30,
"labelMargin" : 3,
"description" : "",
"errorLabel" : "",
"tooltip" : "",
"hideLabel" : false,
"tabindex" : "",
"disabled" : false,
"autofocus" : false,
"dbIndex" : false,
"customDefaultValue" : "",
"calculateValue" : "",
"allowCalculateOverride" : false,
"widget" : null,
"clearOnRefresh" : false,
"validateOn" : "change",
"tag" : "p",
"id" : "e53jrex"
}, {
"label" : "HTML",
"labelPosition" : "left-left",
"className" : "",
"attrs" : [ {
"attr" : "",
"value" : ""
} ],
"content" : "<div class=\"h3\">\n <a\n target=\"_blank\"\n href=\"{{ 'https://' + (!!(data.cluster)?data.cluster.stage:'cluster.stage') + '-keycloak-01.smardigo.digital/auth/admin/' + (!!(data.tenant)?data.tenant.key:'tenant.key') + '/console' }}\">\n {{ 'https://' + (!!(data.cluster)?data.cluster.stage:'cluster.stage') + '-keycloak-01.smardigo.digital/auth/admin/' + (!!(data.tenant)?data.tenant.key:'tenant.key') + '/console' }}\n </a>\n</div>",
"refreshOnChange" : true,
"mask" : false,
"tableView" : true,
"alwaysEnabled" : false,
"type" : "htmlelement",
"input" : false,
"key" : "html3",
"validate" : {
"customMessage" : "",
"json" : "",
"required" : false,
"custom" : "",
"customPrivate" : false
},
"conditional" : {
"show" : "",
"when" : "",
"json" : "",
"eq" : ""
},
"tabs" : null,
"encrypted" : false,
"properties" : { },
"tags" : null,
"customConditional" : "",
"logic" : [ ],
"refreshOn" : "data",
"reorder" : false,
"placeholder" : "",
"prefix" : "",
"customClass" : "",
"suffix" : "",
"multiple" : false,
"defaultValue" : null,
"protected" : false,
"unique" : false,
"persistent" : false,
"hidden" : false,
"clearOnHide" : true,
"dataGridLabel" : false,
"labelWidth" : 30,
"labelMargin" : 3,
"description" : "",
"errorLabel" : "",
"tooltip" : "",
"hideLabel" : false,
"tabindex" : "",
"disabled" : false,
"autofocus" : false,
"dbIndex" : false,
"customDefaultValue" : "",
"calculateValue" : "",
"allowCalculateOverride" : false,
"widget" : null,
"clearOnRefresh" : false,
"validateOn" : "change",
"tag" : "p",
"id" : "ejuaoe9"
} ]
}

@ -11,6 +11,10 @@
"type" : "object",
"classification" : "PRIVATE"
},
"connect-features" : {
"type" : "object",
"classification" : "PRIVATE"
},
"creation_date" : {
"type" : "date",
"classification" : "PRIVATE"
@ -43,6 +47,10 @@
"type" : "long",
"classification" : "PRIVATE"
},
"selected_tenant" : {
"type" : "object",
"classification" : "PRIVATE"
},
"service" : {
"type" : "string",
"classification" : "PRIVATE"

File diff suppressed because it is too large Load Diff

@ -4,6 +4,7 @@ def env = [
smardigo_management_action: smardigoManagementAction,
cluster_name: cluster.name,
cluster_service: cluster.service,
cluster_services: [cluster.service] + execution.getVariable('connect-features'),
cluster_size: cluster.size,
stage: cluster.stage,
current_realm_name: tenant.key,
@ -14,8 +15,18 @@ def env = [
def ansibleCommand= 'ansible-playbook ' + smardigoManagementAction + '.yml --vault-password-file ~/vault-pass'
def ansibleEnvironment= ' -e \"'
env.each { key, val ->
if (val instanceof List) {
ansibleEnvironment+= key + '=['
val.each { _val ->
ansibleEnvironment+='\'' + _val + '\','
}
ansibleEnvironment = ansibleEnvironment.substring(0, ansibleEnvironment.length() - 1);
ansibleEnvironment+='] '
} else {
ansibleEnvironment+= key + '=\'' + val + '\' '
}
}
ansibleEnvironment = ansibleEnvironment.substring(0, ansibleEnvironment.length() - 1);
ansibleEnvironment+= '\"'
ansibleCommand += ansibleEnvironment

@ -2,7 +2,6 @@
dev-awx-01
[connect]
# <stage>-<tenant>-<name>-<node>
dev-management-smardigo-01
[elastic]

@ -1,3 +1,3 @@
{
"text": "Role role {{ role_name }} on <{{ service_name }}> finished successfully."
"text": "Role {{ role_name }} on <{{ inventory_hostname }}> finished successfully."
}

@ -1,3 +1,3 @@
{
"text": "Start role {{ role_name }} on <{{ service_name }}>."
"text": "Start role {{ role_name }} on <{{ inventory_hostname }}>."
}

@ -1,3 +1,3 @@
{
"text": "Removed {{ service_name }} on {{ stage_server_url_host }} successfully."
"text": "Removed {{ inventory_hostname }} on {{ stage_server_domain }} successfully."
}

@ -1,3 +1,3 @@
{
"text": "Removing {{ service_name }} on {{ stage_server_url_host }}."
"text": "Removing {{ inventory_hostname }} on {{ stage_server_domain }}."
}

@ -18,7 +18,7 @@ datasources:
# <int> org id. will default to orgId 1 if not specified
orgId: 1
# <string> url
url: http://{{ service_name }}-prometheus:9090
url: http://{{ inventory_hostname }}-prometheus:9090
# <string> database password, if used
password:
# <string> database user, if used

@ -7,7 +7,7 @@ global:
# Attach these labels to any time series or alerts when communicating with
# external systems (federation, remote storage, Alertmanager).
external_labels:
monitor: '{{ stage_server_name }}'
monitor: '{{ inventory_hostname }}'
# Load and evaluate rules in this file every 'evaluation_interval' seconds.
rule_files:
@ -19,7 +19,7 @@ alerting:
- scheme: http
static_configs:
- targets:
- "{{ service_name }}-alertmanager:9093"
- "{{ inventory_hostname }}-alertmanager:9093"
# A scrape configuration containing exactly one endpoint to scrape:
# Here it's Prometheus itself.
@ -32,7 +32,7 @@ scrape_configs:
- job_name: 'prometheus'
static_configs:
- targets: [
'{{ service_name }}-prometheus:9090'
'{{ inventory_hostname }}-prometheus:9090'
]
labels:
env: {{ stage }}

@ -6,7 +6,7 @@
# admin api (dashboard, rest api, ...)
[http.routers.api]
rule = "Host(`{{ stage_server_name }}.{{ domain }}`)"
rule = "Host(`{{ inventory_hostname }}.{{ domain }}`)"
entrypoints = ["admin-traefik"]
middlewares = ["traefik-auth"]
service = "api@internal"
@ -15,7 +15,7 @@
# metrics api (prometheus)
[http.routers.metrics]
rule = "Host(`{{ stage_server_name }}.{{ domain }}`) && Path(`/metrics`)"
rule = "Host(`{{ inventory_hostname }}.{{ domain }}`) && Path(`/metrics`)"
entrypoints = ["admin-service"]
# middlewares = ["traefik-auth"]
service = "prometheus@internal"

@ -1,10 +0,0 @@
git archive --format zip --output ../hetzner-ansible.zip master
scp ../hetzner-ansible.zip sven.ketelsen@dev-awx-01.smardigo.digital:~/hetzner-ansible.zip
#cd /var/lib/rancher/k3s/storage/pvc-57241168-6a49-4a8e-be84-748267fe1c5a_default_awx-projects-claim
#mv hetzner-ansible hetzner-ansible-old<...>
#mkdir hetzner-ansible
#cd hetzner-ansible
#cp ~/hetzner-ansible.zip .
#unzip hetzner-ansible.zip
#hetzner-ansible.zip
Loading…
Cancel
Save