DEV-316: fixing linter problems

master
Görz, Friedrich 4 years ago committed by Ketelsen, Sven
parent 52b2bc71d5
commit 76289d2242

@ -0,0 +1,4 @@
exclude_paths:
- .ansible/
- test*.yml

@ -41,7 +41,9 @@ common_apt_dependencies:
- bash-completion
- python3-pip
common_pip_dependencies: []
common_pip_dependencies:
- docker-compose
use_ssl: true
http_s: "http{{ use_ssl | ternary('s', '', omit) }}"

@ -53,7 +53,7 @@
- role: hcloud
when: "'hcloud' in group_names"
- role: hetzner-state
- role: hetzner_state
vars:
hetzner_state: 'started'
when: "'hcloud' in group_names"

@ -14,11 +14,13 @@
oauth_token: "{{ digitalocean_authentication_token }}"
register: do_sshkeys_found
- set_fact:
- name: "Combine default droplet config with settings vor inventory_hostname"
set_fact:
droplet_combined: "{{ droplet_defaults | combine(droplet) }}"
- block:
- set_fact:
- name: "Creating tag names by RegEx on droplet name"
set_fact:
tag_service: "{{ droplet.name | regex_search('[a-z]+-([a-z]+)-[0-9]+','\\1') | first | string }}"
tag_stage: "{{ droplet.name | regex_search('([a-z]+)-[a-z]+-[0-9]+','\\1') | first | string }}"
@ -48,7 +50,7 @@
register: tag_response
loop: "{{ droplet.tags }}"
- name: "Set fact"
- name: "Set server_ip by fact parsing"
delegate_to: localhost
set_fact:
stage_server_ip: "{{ item }}"

@ -39,7 +39,7 @@
tags:
- update_dns
- name: "Delete DNS entry for <{{ record_data }}:{{ record_name }}> if necessary"
- name: "Delete DNS entry for <{{ record_data }}:{{ record_name }}> if necessary"
uri:
method: DELETE
url: "https://api.digitalocean.com/v2/domains/{{ domain }}/records/{{ domain_record.id }}"
@ -48,8 +48,8 @@
return_content: yes
status_code: 204
when:
domain_record.ip != '-'
and record_data != domain_record.ip
- domain_record.ip != '-'
- record_data != domain_record.ip
delegate_to: 127.0.0.1
become: false
tags:
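
Note: a list under when is joined with logical AND, so the two items above are equivalent to the removed single-expression form:

when: domain_record.ip != '-' and record_data != domain_record.ip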

@ -1,20 +1,7 @@
---
- name: "Register variable for docker networks"
shell: docker network ls
register: docker_networks
changed_when: false
when: docker_enabled
- name: "Docker network create back-tier"
shell: docker network create back-tier
when:
- docker_enabled
- docker_networks.stdout.find("back-tier") == -1
- name: "Docker network create front-tier"
shell: docker network create front-tier
when:
- docker_enabled
- docker_networks.stdout.find("front-tier") == -1
- name: "Create Docker network"
docker_network:
name: "{{ item }}"
loop:
- front-tier
- back-tier
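
The docker_network module is idempotent, which is what makes the manual "docker network ls" check above obsolete; a sketch with the implicit default spelled out and the old docker_enabled gate kept (an assumption, since the new task drops it):

- name: "Create Docker network"
  docker_network:
    name: "{{ item }}"
    state: present   # module default: create the network only if missing
  loop:
    - front-tier
    - back-tier
  when: docker_enabled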

@ -39,11 +39,10 @@
- update_deployment
- name: "Stopping <{{ current_service_id }}>"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ current_service_id }}'
community.docker.docker_compose:
project_src: '{{ service_base_path }}/{{ current_service_id }}'
state: absent
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:
- update_deployment
@ -70,16 +69,10 @@
current_owner: "{{ docker_owner }}"
current_group: "{{ docker_group }}"
- name: "Updating docker image for <{{ current_service_id }}>"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ current_service_id }}'
tags:
- update_deployment
- name: "Starting <{{ current_service_id }}>"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ current_service_id }}'
community.docker.docker_compose:
project_src: '{{ service_base_path }}/{{ current_service_id }}'
state: present
pull: true
tags:
- update_deployment
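
In community.docker.docker_compose, state: present corresponds to "docker-compose up -d" and pull: true fetches fresh images first, so the former pull + up shell tasks collapse into one call; a minimal standalone sketch (the project path is hypothetical):

- community.docker.docker_compose:
    project_src: /opt/services/example
    state: present
    pull: true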

@ -7,19 +7,19 @@ awx_operator_url: "https://raw.githubusercontent.com/ansible/awx-operator/{{ awx
kubernetes_awx_namespace: "awx-test"
kubernetes_awx_postgres_volume_size: "50Gi"
kubernetes_awx_postgres_volume_accessMode: "ReadWriteOnce"
kubernetes_awx_postgres_volume_accessmode: "ReadWriteOnce"
kubernetes_awx_postgres_volume_path: "/mnt/{{ kubernetes_awx_namespace }}/data/postgres"
kubernetes_awx_postgres_pvc_size: "50Gi"
kubernetes_awx_postgres_pvc_accessMode: "ReadWriteOnce"
kubernetes_awx_postgres_pvc_accessmode: "ReadWriteOnce"
kubernetes_awx_project_volume_size: "10Gi"
kubernetes_awx_project_volume_accessMode: "ReadWriteOnce"
kubernetes_awx_project_volume_accessmode: "ReadWriteOnce"
kubernetes_awx_project_volume_path: "/mnt/{{ kubernetes_awx_namespace }}/data/project"
kubernetes_awx_project_pvc_size: "10Gi"
kubernetes_awx_project_pvc_accessMode: "ReadWriteOnce"
kubernetes_awx_project_pvc_accessmode: "ReadWriteOnce"
kubernetes_awx_service_port: "80"
kubernetes_awx_service_targetPort: "80"
kubernetes_awx_service_targetport: "80"
awx_ansible_username: "ansible"
awx_ansible_password: "ansible"

@ -1,6 +1,7 @@
---
- set_fact:
- name: "Initilize VARs"
set_fact:
awx_type_id: "None"
changed_when: False
tags:

@ -1,5 +1,6 @@
---
- set_fact:
- name: "Initilize VARs"
set_fact:
found_credential_id: ""
tags:
- awx_config

@ -1,6 +1,7 @@
---
- set_fact:
- name: "Initialize VARs"
set_fact:
awx_job_template_id: "None"
changed_when: False
tags:

@ -473,7 +473,11 @@
- awx_config
- name: "Create archive for repository <hetzner-ansible>"
shell: cd {{ playbook_dir }} && git archive --format tar.gz -o /tmp/hetzner-ansible.tar.gz HEAD
git:
archive: hetzner-ansible.tar.gz
dest: /tmp/gitrepo
repo: "{{ playbook_dir }}"
version: HEAD
delegate_to: 127.0.0.1
become: false
tags:
@ -501,7 +505,7 @@
- name: "Extract repository archive for <hetzner-ansible>"
unarchive:
src: /tmp/hetzner-ansible.tar.gz
src: /tmp/gitrepo/hetzner-ansible.tar.gz
dest: "{{ awx_project_path }}/hetzner-ansible"
tags:
- awx_config

@ -53,6 +53,7 @@
file:
path: "{{ item }}"
state: directory
mode: '0755'
owner: 'root'
group: 'root'
loop:

@ -11,7 +11,7 @@ spec:
capacity:
storage: {{ kubernetes_awx_postgres_volume_size }}
accessModes:
- {{ kubernetes_awx_postgres_volume_accessMode }}
- {{ kubernetes_awx_postgres_volume_accessmode }}
hostPath:
path: "{{ kubernetes_awx_postgres_volume_path }}"
@ -28,7 +28,7 @@ spec:
capacity:
storage: {{ kubernetes_awx_project_volume_size }}
accessModes:
- {{ kubernetes_awx_project_volume_accessMode }}
- {{ kubernetes_awx_project_volume_accessmode }}
hostPath:
path: "{{ kubernetes_awx_project_volume_path }}"
@ -41,7 +41,7 @@ metadata:
spec:
storageClassName: manual
accessModes:
- {{ kubernetes_awx_postgres_pvc_accessMode }}
- {{ kubernetes_awx_postgres_pvc_accessmode }}
resources:
requests:
storage: {{ kubernetes_awx_postgres_pvc_size }}
@ -55,7 +55,7 @@ metadata:
spec:
storageClassName: manual
accessModes:
- {{ kubernetes_awx_project_pvc_accessMode }}
- {{ kubernetes_awx_project_pvc_accessmode }}
resources:
requests:
storage: {{ kubernetes_awx_project_pvc_size }}
@ -70,7 +70,7 @@ spec:
admin_user: {{ awx_admin_username }}
projects_persistence: true
projects_existing_claim: awx-project-claim-0
projects_storage_access_mode: {{ kubernetes_awx_project_pvc_accessMode }}
projects_storage_access_mode: {{ kubernetes_awx_project_pvc_accessmode }}
projects_storage_size: {{ kubernetes_awx_project_pvc_size }}
---
@ -84,7 +84,7 @@ spec:
- name: http
protocol: TCP
port: {{ kubernetes_awx_service_port }}
targetPort: {{ kubernetes_awx_service_targetPort }}
targetPort: {{ kubernetes_awx_service_targetport }}
externalIPs:
- {{ stage_server_ip }}

@ -16,6 +16,7 @@
blockinfile:
marker: "# {mark} managed by ansible (hosts config for {{ inventory_hostname }})"
path: "/etc/hosts"
mode: '0644'
state: present
create: yes
block: |
@ -89,6 +90,7 @@
state: directory
owner: '{{ item }}'
group: '{{ item }}'
mode: '0755'
loop: '{{ smardigo_plattform_users }}'
when: docker_enabled
tags:
@ -132,6 +134,7 @@
file:
state: directory
path: '/etc/bash_completion.d'
mode: '0755'
tags:
- install
@ -148,15 +151,15 @@
ansible.builtin.get_url:
url: "https://raw.githubusercontent.com/docker/compose/{{ docker_compose_version }}/contrib/completion/bash/docker-compose"
dest: "/etc/bash_completion.d/docker-compose"
mode: '644'
mode: '0644'
when: docker_enabled
tags:
- install
- name: "Upgrade all packages"
apt:
name: '*'
state: latest
update_cache: yes
upgrade: yes
tags:
- install
- upgrade
@ -168,6 +171,7 @@
state: directory
owner: 'root'
group: 'root'
mode: '0755'
when: docker_enabled
tags:
- config
@ -191,16 +195,7 @@
state: directory
owner: 'root'
group: 'root'
when: docker_enabled
tags:
- config
- name: "Ensure docker daemon configuration directory exists"
file:
path: '/etc/docker'
state: directory
owner: 'root'
group: 'root'
mode: '0755'
when: docker_enabled
tags:
- config
@ -209,7 +204,7 @@
file:
state: absent
path: '/etc/docker/daemon.json'
when: docker_enabled == false
when: not docker_enabled
tags:
- config

@ -1 +1,5 @@
---
- name: harbor restart
systemd:
name: harbor
state: restarted
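
Handlers run once at the end of the play when notified by a changed task; further down, the docker-compose template task triggers this one via "notify: harbor restart". A minimal sketch of the notifying side (the template name is hypothetical, the dest variable exists in the role):

- name: "Create docker-compose.yml with merged VARs"
  template:
    src: docker-compose.yml.j2
    dest: '{{ remote_docker_compose_file_path }}'
  notify: harbor restart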

@ -1,19 +1,29 @@
---
#- name: "harbor BASE settings"
# block:
# - set_fact:
# harbor_base_configuration_merged: '{{ harbor_base_configuration | combine( dict ,recursive=True ) }}'
- name: "Check if harbor is up and running"
delegate_to: 127.0.0.1
become: false
uri:
url: "{{ harbor_external_url }}/api/v2.0/configurations"
user: '{{ harbor_admin_username }}'
password: '{{ harbor_admin_password }}'
method: GET
body_format: json
force_basic_auth: yes
status_code: [200]
register: check_harbor
delay: 10
retries: 20
until: check_harbor.status in [200]
# - name: "BLOCK: Configure harbor BASE settings"
# include_tasks: configure_base_config.yml
# vars:
# base_configuration: '{{ harbor_base_configuration_merged }}'
# args:
# apply:
# tags:
# - harbor-configure-base
## end of block for base settings
- name: "Configure harbor BASE settings"
include_tasks: configure_base_config.yml
vars:
base_configuration: '{{ harbor_base_configuration }}'
args:
apply:
tags:
- harbor-configure-base
- name: "Create object of templated harbor projects"
set_fact:

@ -17,7 +17,8 @@
delay: 10
retries: 3
- debug:
- name: "DEBUG"
debug:
msg: 'found projects: {{ project_exists.json }}'
when: debug

@ -1,6 +1,7 @@
---
- set_fact:
- name: "Initialze VARs due to hardcoded stuff in harbor API"
set_fact:
member_state: '{{ member.member_state | default("present") }}'
harbor_member_roles:
-
@ -43,7 +44,8 @@
delay: 10
retries: 3
- set_fact:
- name: "Set fact"
set_fact:
group_type: "{{ ( harbor_member_grouptypes | selectattr('name','==',( member.group_type | lower )) | list | first ).group_type }}"
role_id: "{{ ( harbor_member_roles | selectattr('name','==',( member.role| lower ) ) | list | first ).role_id | int }}"

@ -17,7 +17,8 @@
delay: 10
retries: 3
- set_fact:
- name: "Set fact"
set_fact:
body_content: "{ \"{{ meta_data_elem.key }}\":\"{{ meta_data_elem.value }}\" }"
- name: "Add meta_data: <<{{ meta_data_elem.key }}>>"

@ -1,8 +1,10 @@
---
- set_fact:
- name: "Initialze VARs"
set_fact:
tok_obj: {}
- debug:
- name: "DEBUG"
debug:
msg: "DEBUGGING - robot_token: {{ robot_token }}"
when:
- debug

@ -1,5 +1,6 @@
---
- set_fact:
- name: "Initialze VARs"
set_fact:
token_object_combined: {}
- name: "Get all robot tokens"
@ -41,7 +42,8 @@
- all_robot_tokens.json | selectattr('name','contains',token_object.name) | list | length == 0
- token_state == 'present'
- set_fact:
- name: "Set VARs if current robot token object already exists"
set_fact:
robots_id: "{{ ( all_robot_tokens.json | selectattr('name','contains',token_object.name) | list | first ).id }}"
remote_robot_token_object: "{{ all_robot_tokens.json | selectattr('name','contains',token_object.name) | list | first }}"
token_object_combined: "{{ all_robot_tokens.json | selectattr('name','contains',token_object.name) | list | first | combine(token_object, recursive=True) }}"
@ -82,7 +84,8 @@
- name: "Block to Update robot token data"
block:
- debug:
- name: "DEBUG"
debug:
msg: "DEBUGGING before dropping - combined token_object_combined: {{ token_object_combined }}"
when:
- debug
@ -93,7 +96,8 @@
set_fact:
token_object_dropped: "{{ token_object_dropped | combine( { item.key: item.value } ) }}"
with_dict: "{{ token_object_combined }}"
when: "{{ item.key not in ['secret','secret_refresh'] }}"
when:
- item.key not in ['secret','secret_refresh']
# harbor API behaviour:
# in case of initial creation for robot token objects, harbor creates a name for this
@ -105,14 +109,17 @@
# so harbor API forces me to create this workaround to avoid such errors
#
# part 1: define name of object
- set_fact:
- name: "Set fact"
set_fact:
robot_token_name_cleaned:
name: 'robot${{ token_object_dropped.name }}'
# part 2: override name with new defined name of object
- set_fact:
- name: "Set fact"
set_fact:
token_object_finished: '{{ token_object_dropped | combine(robot_token_name_cleaned, recursive=True) }}'
- debug:
- name: "DEBUG"
debug:
msg: "DEBUGGING after dropping - combined token_object_finished: {{ token_object_finished }}"
when:
- debug

@ -14,6 +14,9 @@
file:
state: directory
path: '{{ service_base_path }}/{{ inventory_hostname }}'
mode: 0755
owner: root
group: root
tags:
- update_deployment
- update_config
@ -55,7 +58,8 @@
when:
- not harbor_tarball.stat.exists
- set_fact:
- name: "Set fact"
set_fact:
remote_docker_compose_file_path: '{{ service_base_path }}/{{ inventory_hostname }}/harbor/docker-compose.yml'
- name: "Check if {{ inventory_hostname }}/harbor/docker-compose.yml exists"
@ -79,13 +83,17 @@
group: 'root'
mode: 0644
# due to the missing customized docker-compose file for the smardigo environment,
# every start of a standard harbor installation will fail in the current smardigo environment;
# therefore we whitelist all non-zero script return codes
# => failed_when statement for ansible-lint
- name: "Exec harbor install.sh"
ansible.builtin.shell:
ansible.builtin.command:
cmd: './install.sh {{ harbor_install_opts | default("--with-trivy --with-chartmuseum") }}'
chdir: '{{ service_base_path }}/{{ inventory_hostname }}/harbor/'
creates: '{{ remote_docker_compose_file_path }}'
register: run_installscript
ignore_errors: yes
when:
- not harbor_installation.stat.exists
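
The creates argument is what keeps this command idempotent: once the generated docker-compose.yml exists, the install script is skipped entirely, roughly:

- ansible.builtin.command:
    cmd: ./install.sh
    creates: '{{ remote_docker_compose_file_path }}'   # skip when this file already exists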
- name: "Stopping harbor"
community.docker.docker_compose:
@ -107,6 +115,9 @@
copy:
src: '{{ remote_docker_compose_file_path }}'
dest: '{{ remote_docker_compose_file_path }}_from_installsh'
owner: 'root'
group: 'root'
mode: 0644
remote_src: yes
when:
- not harbor_installation.stat.exists
@ -115,11 +126,14 @@
copy:
src: '{{ service_base_path }}/{{ inventory_hostname }}/harbor/common/config/nginx/nginx.conf'
dest: '{{ service_base_path }}/{{ inventory_hostname }}/harbor/common/config/nginx/nginx.conf_orig'
owner: 'root'
group: 'root'
mode: 0644
remote_src: yes
when:
- not harbor_installation.stat.exists
- name:
- name: "Removing lines with proxy_set_header due to running behind traefik"
ansible.builtin.lineinfile:
path: '{{ service_base_path }}/{{ inventory_hostname }}/harbor/common/config/nginx/nginx.conf'
state: absent
@ -130,7 +144,8 @@
src: '{{ remote_docker_compose_file_path }}'
register: docker_compose_file_remote_encoded
- set_fact:
- name: "Set fact"
set_fact:
harbor_dockercompose_merged: '{{ docker_compose_file_remote_encoded.content | b64decode | from_yaml | combine(harbor_dockercompose_customized, recursive=True) }}'
- name: "Create docker-compose.yml with merged VARs"
@ -141,12 +156,7 @@
group: 'root'
mode: '0644'
register: docker_compose_change
- name: "Ensure harbor systemd service restarted"
systemd:
name: harbor
state: restarted
when: docker_compose_change.changed
notify: harbor restart
- name: "Ensure harbor systemd service started"
systemd:

@ -5,6 +5,7 @@
- name: "Reading firewall entry for <{{ current_firewall_name }}>"
set_fact:
firewall_record: "{{ firewall_records | selectattr('name', 'equalto', current_firewall_name) | list | first | default({'name': '-', 'id': '-'}) }}"
firewall_template: "firewall-{{ current_firewall_name }}.json.j2"
tags:
- update_networks
@ -21,7 +22,7 @@
method: POST
url: "https://api.hetzner.cloud/v1/firewalls"
body_format: json
body: "{{ lookup('template','firewall-{{ current_firewall_name }}.json.j2') }}"
body: "{{ lookup('template',firewall_template) }}"
headers:
accept: application/json
authorization: Bearer {{ hetzner_authentication_token }}
@ -39,7 +40,7 @@
method: PUT
url: "https://api.hetzner.cloud/v1/firewalls/{{ firewall_record.id }}"
body_format: json
body: "{{ lookup('template','firewall-{{ current_firewall_name }}.json.j2') }}"
body: "{{ lookup('template',firewall_template) }}"
headers:
accept: application/json
authorization: Bearer {{ hetzner_authentication_token }}
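
Background: a nested moustache inside a quoted lookup argument is deprecated and flagged by ansible-lint, hence the precomputed firewall_template fact above; an equivalent inline form would concatenate instead (a sketch, not what the commit uses):

body: "{{ lookup('template', 'firewall-' ~ current_firewall_name ~ '.json.j2') }}"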

@ -22,7 +22,8 @@
- name: "BLOCK << WITHOUT >> pagination"
block:
- set_fact:
- name: "Get firewall object from list"
set_fact:
lookup_fw_obj: "{{ hcloud_firewalls_all.json.firewalls | community.general.json_query(jsonquery_find_firewall_name) }}"
vars:
jsonquery_find_firewall_name: "[?name=='{{ firewall_object.name }}']"
@ -45,7 +46,8 @@
delegate_to: 127.0.0.1
become: false
- set_fact:
- name: "Get firewall object from list"
set_fact:
lookup_fw_obj: "{{ hcloud_firewalls_all.json.results | community.general.json_query(querystr1) | first | community.general.json_query(querystr2) | community.general.json_query(querystr2) }}"
vars:
querystr1: "[[*].json.firewalls]"
@ -91,7 +93,8 @@
- name: "Delete firewall rule for <<{{ firewall_object.name }}>>"
block:
- set_fact:
- name: "Create firewall object for deactivation"
set_fact:
deactivate_fw_obj:
remove_from: "{{ firewall_object.apply_to }}"

@ -25,8 +25,9 @@
delegate_to: 127.0.0.1
become: false
- set_fact:
id: '{{ ( get_all_clients.json | selectattr("clientId","equalto",argo_client_id) | first ).id }}'
- name: "Extract client_id from all_clients"
set_fact:
id: '{{ ( get_all_clients.json | selectattr("clientId","equalto",client_id) | first ).id }}'
when:
- get_all_clients.json | selectattr('clientId', 'equalto', client_id) | list | length == 1
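
The length == 1 guard keeps the first filter from blowing up on an empty selection; without it, a client_id with no match would abort the play with roughly:

# get_all_clients.json | selectattr("clientId","equalto",client_id) | first
# -> "No first item, sequence was empty." when nothing matches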

@ -21,7 +21,8 @@
status_code: [200]
register: get_all_users
- set_fact:
- name: "Extract group_id/user_id we are searching for from all available ones"
set_fact:
group_id: '{{ ( get_all_groups.json | selectattr("name","equalto",destination_group) | first ).id }}'
user_id: '{{ ( get_all_users.json | selectattr("username","equalto",username) | first ).id }}'
@ -36,9 +37,6 @@
status_code: [200]
register: get_all_groups_for_current_user
- set_fact:
already_in_group: '{{ get_all_groups_for_current_user.json | selectattr("name","equalto",destination_group) }}'
- name: "ADDING USER <{{ client_id }}> for realm <{{ realm_name }}> to Group <<{{ destination_group }}>>"
delegate_to: 127.0.0.1
become: false

@ -23,11 +23,10 @@
- update_deployment
- name: "Stop {{ inventory_hostname }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
community.docker.docker_compose:
project_src: '{{ service_base_path }}/{{ inventory_hostname }}'
state: absent
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:
- update_deployment
@ -62,9 +61,9 @@
# - update_deployment
- name: "Start {{ inventory_hostname }}"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ inventory_hostname }}'
community.docker.docker_compose:
state: present
restarted: true
project_src: '{{ service_base_path }}/{{ inventory_hostname }}'
tags:
- update_deployment

@ -1,5 +1,6 @@
---
- set_fact:
- name: "Initialize VARs"
set_fact:
api_path: '/s/{{ es_space }}/api/saved_objects'
es_object_type: dashboard
dashboard_exists: False
@ -18,18 +19,21 @@
register: all_dashboards
become: false
- set_fact:
- name: "Lookup dashboard object"
set_fact:
lookup_dashboard_object: '{{ all_dashboards.json | community.general.json_query(querystr1) | first | community.general.json_query(dashboard_query) }}'
vars:
querystr1: "[saved_objects[*]]"
dashboard_query: "[?attributes.title=='{{ elastic_dashboard.attributes.title }}']"
- set_fact:
- name: "Set switch VAR"
set_fact:
dashboard_exists: True
when:
- lookup_dashboard_object | length > 0
- set_fact:
- name: "Drop not needed keys from dict"
set_fact:
elastic_dashboard_cleaned: "{{ elastic_dashboard_cleaned | combine( { item.key: item.value } ) }}"
with_dict: '{{ elastic_dashboard }}'
when:
@ -52,35 +56,40 @@
register: all_searches
become: false
- set_fact:
- name: "Lookup search object"
set_fact:
lookup_search_object: '{{ all_searches.json | community.general.json_query(querystr1) | first | community.general.json_query(search_query) }}'
vars:
querystr1: "[saved_objects[*]]"
search_query: "[?attributes.title=='{{ elastic_dashboard.references[0].search_refname }}']"
- set_fact:
- name: "Set switch VAR"
set_fact:
search_exists: True
when:
- lookup_search_object | length > 0
- debug:
- name: "DEBUG"
debug:
msg: 'lookup_search_object{{ lookup_search_object }}'
- set_fact:
- name: "Set VAR"
set_fact:
panelindex_uuid: '{{ elastic_dashboard.references[0].search_refname | to_uuid }}'
- name: "Doing evil string concatination with ansible in addition with variables"
delegate_to: localhost
set_fact:
panelsJSON: '{{ (''[{"version":"7.16.1","type":"search","gridData":{"x":0,"y":0,"w":48,"h":28,"i":"'' + ( panelindex_uuid | string ) + ''"},"panelIndex":"'' + ( panelindex_uuid | string ) + ''","embeddableConfig":{"enhancements":{}},"panelRefName":"panel_'' + ( panelindex_uuid | string ) + ''"}]'') | string }}'
panelsjson: '{{ (''[ { "version":"7.16.1","type":"search","gridData":{"x":0,"y":0,"w":48,"h":28,"i":"'' + ( panelindex_uuid | string ) + ''"},"panelIndex":"'' + ( panelindex_uuid | string ) + ''","embeddableConfig":{"enhancements":{} },"panelRefName":"panel_'' + ( panelindex_uuid | string ) + ''" } ]'') | string }}'
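
A hedged alternative to the string concatenation: build the panel as data and serialize it with to_json, which avoids the quote juggling (same layout values assumed):

panelsjson: >-
  {{ [ { 'version': '7.16.1', 'type': 'search',
         'gridData': { 'x': 0, 'y': 0, 'w': 48, 'h': 28, 'i': panelindex_uuid | string },
         'panelIndex': panelindex_uuid | string,
         'embeddableConfig': { 'enhancements': {} },
         'panelRefName': 'panel_' ~ ( panelindex_uuid | string ) } ] | to_json }}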
-
name: "Prepare step for merging dashboard objects"
delegate_to: localhost
set_fact:
ref_obj_modified:
attributes:
title: '{{ elastic_dashboard.attributes.title }}'
panelsJSON: '{{ panelsJSON | string }}'
panelsJSON: '{{ panelsjson | string }}'
references:
-
name: '{{ panelindex_uuid }}'
@ -105,7 +114,8 @@
set_fact:
elastic_dashboard_cleaned: "{{ elastic_dashboard_cleaned | combine( ref_obj_modified ) }}"
- debug:
- name: "DEBUG"
debug:
msg: 'DEBUG elastic_dashboard_cleaned: {{ elastic_dashboard_cleaned }}'
- name: "Create {{ es_object_type }} <<{{ elastic_dashboard.attributes.title }}>>"

@ -1,5 +1,6 @@
---
- set_fact:
- name: "Initialize VARs"
set_fact:
api_path: '/s/{{ es_space }}/api/saved_objects'
es_object_type: 'index-pattern'
indexpattern_exists: False
@ -17,18 +18,21 @@
register: all_indexpatterns
become: false
- set_fact:
- name: "Lookup index pattern object if exists"
set_fact:
lookup_indexpattern_object: '{{ all_indexpatterns.json | community.general.json_query(querystr1) | first | community.general.json_query(indexpattern_query) }}'
vars:
querystr1: "[saved_objects[*]]"
indexpattern_query: "[?attributes.title=='{{ elastic_indexpattern.attributes.title }}']"
- set_fact:
- name: "Set switch VAR"
set_fact:
indexpattern_exists: True
when:
- lookup_indexpattern_object | length > 0
- set_fact:
- name: "Drop not needed key from dict"
set_fact:
elastic_indexpattern_cleaned: "{{ elastic_indexpattern_cleaned | combine({item.key: item.value}) }}"
with_dict: '{{ elastic_indexpattern }}'
when:

@ -1,5 +1,6 @@
---
- set_fact:
- name: "Initlize VARs"
set_fact:
api_path: '/api/security/role'
role_exists: False
elastic_role_cleaned: {}
@ -16,17 +17,20 @@
register: all_roles
become: false
- set_fact:
- name: "Lookup role object if exists"
set_fact:
lookup_role_object: '{{ all_roles.json | community.general.json_query(roles_query) }}'
vars:
roles_query: "[?name=='{{ elastic_role.name }}']"
- set_fact:
- name: "Set switch VAR"
set_fact:
role_exists: True
when:
- lookup_role_object | length > 0
- set_fact:
- name: "Drop not needed keys from dict"
set_fact:
elastic_role_cleaned: "{{ elastic_role_cleaned | combine({item.key: item.value}) }}"
with_dict: '{{ elastic_role }}'
when:

@ -1,5 +1,6 @@
---
- set_fact:
- name: "Initialze VARs"
set_fact:
api_path: '/s/{{ es_space }}/api/saved_objects'
es_object_type: search
search_exists: False
@ -18,18 +19,21 @@
register: all_searches
become: false
- set_fact:
- name: "Lookup search object if exists"
set_fact:
lookup_search_object: '{{ all_searches.json | community.general.json_query(querystr1) | first | community.general.json_query(search_query) }}'
vars:
querystr1: "[saved_objects[*]]"
search_query: "[?attributes.title=='{{ elastic_search.attributes.title }}']"
- set_fact:
- name: "Set switch VAR"
set_fact:
search_exists: True
when:
- lookup_search_object | length > 0
- set_fact:
- name: "Drop not needed keys from dict"
set_fact:
elastic_search_cleaned: "{{ elastic_search_cleaned | combine({item.key: item.value}) }}"
with_dict: '{{ elastic_search }}'
when:
@ -51,21 +55,25 @@
register: all_indexpatterns
become: false
- set_fact:
- name: "Lookup index pattern object if exists"
set_fact:
lookup_indexpattern_object: '{{ all_indexpatterns.json | community.general.json_query(querystr1) | first | community.general.json_query(indexpattern_query) }}'
vars:
querystr1: "[saved_objects[*]]"
indexpattern_query: "[?attributes.title=='{{ elastic_search.references[0].ref_name }}']"
- set_fact:
- name: "Set switch VAR"
set_fact:
indexpattern_exists: True
when:
- lookup_indexpattern_object | length > 0
- debug:
- name: "DEBUG"
debug:
msg: 'lookup_indexpattern_object:{{ lookup_indexpattern_object }}'
-
name: "Prepare step to combine dicts"
delegate_to: localhost
set_fact:
ref_obj_modified:
@ -93,7 +101,8 @@
set_fact:
elastic_search_cleaned: "{{ elastic_search_cleaned | combine( ref_obj_modified ) }}"
- debug:
- name: "DEBUG"
debug:
msg: 'DEBUG elastic_search_cleaned: {{ elastic_search_cleaned }}'

@ -1,5 +1,6 @@
---
- set_fact:
- name: "Initialize VARs"
set_fact:
api_path: '/api/spaces/space'
space_exists: False
elastic_space_cleaned: {}
@ -16,23 +17,27 @@
register: all_spaces
become: false
- set_fact:
- name: "Lookup space object if exists"
set_fact:
lookup_space_object: "{{ all_spaces.json | community.general.json_query(spaces_query) }}"
vars:
spaces_query: "[?name=='{{ elastic_space.name }}']"
- set_fact:
- name: "Set switch VAR"
set_fact:
space_exists: True
when:
- lookup_space_object | length > 0
- set_fact:
- name: "Drop not needed keys from dict"
set_fact:
elastic_space_cleaned: "{{ elastic_space_cleaned | combine({item.key: item.value}) }}"
with_dict: "{{ elastic_space }}"
when:
- item.key not in ['elastic_state']
- debug:
- name: "DEBUG"
debug:
msg: "{{ lookup_space_object | to_json }}"
- name: "Create space <<{{ elastic_space.name }}>>"

@ -1,5 +1,6 @@
---
- set_fact:
- name: "Initialize VARs"
set_fact:
api_path: '/internal/security/users'
user_exists: False
elastic_user_cleaned__create: {}
@ -17,24 +18,28 @@
register: all_users
become: false
- set_fact:
- name: "Lookup user object if exists"
set_fact:
lookup_user_object: '{{ all_users.json | community.general.json_query(users_query) }}'
vars:
users_query: "[?username=='{{ elastic_user.username }}']"
- set_fact:
- name: "Set switch VAR"
set_fact:
user_exists: True
when:
- lookup_user_object | length > 0
- set_fact:
- name: "Drop not needed keys from dict"
set_fact:
elastic_user_cleaned__create: "{{ elastic_user_cleaned__create | combine({item.key: item.value}) }}"
with_dict: '{{ elastic_user }}'
when:
- item.key not in ['elastic_state']
# make sure to not override userdefined password with initial password
- set_fact:
- name: "Drop not needed keys from dict"
set_fact:
elastic_user_cleaned__update: "{{ elastic_user_cleaned__update | combine({item.key: item.value}) }}"
with_dict: '{{ elastic_user_cleaned__create }}'
when:

@ -1,11 +1,12 @@
---
- set_fact:
- name: "Initializing service variables for <{{ es_index_pattern_service.name }}>"
set_fact:
api_path: '/s/{{ es_space }}/api/saved_objects'
es_search_name: '{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ es_index_pattern_service.name }}'
es_dashboard_name: '{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ es_index_pattern_service.name }}'
es_container_name: '{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-01-{{ es_index_pattern_service.name }}'
- name: "Import smardigo default dashboard and its related objects (index-pattern,search,...)"
- name: "Importing smardigo default dashboard and its related objects (index-pattern,search,...)"
delegate_to: localhost
set_fact:
es_object_smardigo_index_pattern_tenant: "{{ lookup('template','smardigo_index_pattern_tenant.json.j2') }}"
@ -15,7 +16,7 @@
when:
- elastic_state == 'present'
- name: "Print objects to local file"
- name: "Printing service objects to local ndjson file"
delegate_to: localhost
copy:
dest: '/tmp/es_objects_ready_to_import__objects.ndjson'
@ -27,7 +28,7 @@
when:
- elastic_state == 'present'
- name: "Import elastic objects ..."
- name: "Importing service objects to kibana"
delegate_to: localhost
uri:
url: "https://{{ api_endpoint }}{{ api_path }}/_import?overwrite=true"
@ -47,7 +48,7 @@
when:
- elastic_state == 'present'
- name: "Remove temporarily created file"
- name: "Removing temporarily created file"
delegate_to: localhost
file:
state: absent

@ -22,11 +22,10 @@
- update_deployment
- name: "Stop {{ kibana_id }}"
shell: docker-compose down
args:
chdir: '{{ service_base_path }}/{{ kibana_id }}'
community.docker.docker_compose:
project_src: '{{ service_base_path }}/{{ kibana_id }}'
state: absent
when: check_docker_compose_file.stat.exists
ignore_errors: yes
tags:
- update_config
- update_deployment
@ -73,17 +72,11 @@
- update_certs
- update_config
- name: "Update {{ kibana_id }}"
shell: docker-compose pull
args:
chdir: '{{ service_base_path }}/{{ kibana_id }}'
tags:
- update_deployment
- name: "Start {{ kibana_id }}"
shell: docker-compose up -d
args:
chdir: '{{ service_base_path }}/{{ kibana_id }}'
community.docker.docker_compose:
project_src: '{{ service_base_path }}/{{ kibana_id }}'
state: present
pull: true
tags:
- update_certs
- update_config

@ -86,7 +86,8 @@
# using template from exported keycloak client object
# due to params that are needed but missing in community.general.keycloak_client
# e.g. defaultClientScopes
- set_fact:
- name: "Create json object as VAR from template"
set_fact:
keycloak_realm_create_client: "{{ lookup('template','keycloak-realm-create-client-argocd.json.j2') }}"
vars:
client_redirect_uri: '{{ argo_client_redirect_uris }}'
@ -129,7 +130,8 @@
- inventory_hostname == groups['kube_control_plane'][0]
# available clients: get needed ID
- set_fact:
- name: "Get ID of client by paring argo_realm_clients object"
set_fact:
id_of_client: '{{ ( argo_realm_clients.json | selectattr("clientId","equalto",argo_client_id ) | first ).id }}'
when:
- inventory_hostname == groups['kube_control_plane'][0]
@ -148,13 +150,15 @@
when:
- inventory_hostname == groups['kube_control_plane'][0]
- debug:
- name: "DEBUG"
debug:
msg: "DEBUGGING: {{ client_secret.json.value }}"
when:
- debug
- inventory_hostname == groups['kube_control_plane'][0]
- set_fact:
- name: "Create VAR to overwrite specific helm value - prepare combining dicts"
set_fact:
additional_helm_values:
configs:
secret:
@ -163,12 +167,14 @@
when:
- inventory_hostname == groups['kube_control_plane'][0]
- set_fact:
- name: "Combining helm release values"
set_fact:
combined_helm__release_values: '{{ k8s_argocd_helm__release_values | combine(additional_helm_values, recursive=True) }}'
when:
- inventory_hostname == groups['kube_control_plane'][0]
- debug:
- name: "DEBUG"
debug:
msg: "DEBUGGING: {{ combined_helm__release_values }}"
when:
- debug

@ -6,9 +6,9 @@
- name: Install dependencies
ansible.builtin.package:
name: "{{ item }}"
state: latest
state: present
loop:
- python3-pip
- python3-pip=20.0.2-5ubuntu1.6
when:
- inventory_hostname == groups['kube_control_plane'][0]
tags:

@ -23,7 +23,8 @@
delegate_to: localhost
become: false
- set_fact:
- name: "Setting LVM related VARs"
set_fact:
pvs: "{{ hcloud_volumes_found.hcloud_volume_info | json_query(jmesquery) }}"
vg_name: vg.postgres_backup
lv_name: lv.postgres_backup
