Compare commits


No commits in common. 'main' and 'master' have entirely different histories.
main...master

.gitignore

@@ -10,7 +10,3 @@ kubespray/
/kubespray/
xvars-*.yml
*/__pycache__/*
# macOS
.DS_Store

@@ -1,42 +1,26 @@
---
variables:
AWX_EE_DOCKER_IMAGE_EXTERN: "harbor-01.smardigo.digital/awx/awx-custom-ee"
ANSIBLE_HOST_KEY_CHECKING: "false"
ANSIBLE_FORCE_COLOR: "true"
AWX_EE_DOCKER_IMAGE_EXTERN: "dev-harbor-01.smardigo.digital/awx/awx-custom-ee"
ANSIBLE_HOST_KEY_CHECKING: 'false'
ANSIBLE_FORCE_COLOR: 'true'
image: docker.dev-at.de/gitlab/gitlab-ci-ansible:latest
image: docker.dev-at.de/smardigo/smardigo-ci-ansible
services:
- name: docker.dev-at.de/gitlab/gitlab-ci-ansible:latest
- name: docker-cache.dev-at.de/docker:19-dind
alias: docker
stages:
- lint
- ansible-lint
- ansible-builder
- run-setup
- run-setup-digitalocean
- run-update
- run-patchday-harbor
- run-patchday-elastic
- run-patchday-database
- run-patchday-all
- run-hcloud-firewall
- ansible-run-setup
- ansible-run-kubernetes
- ansible-patchday
###############################################################################
### http://patorjk.com/software/taag/#p=display&f=Doom&t=lint
### _ _ _
### | (_) | |
### | |_ _ __ | |_
### | | | '_ \| __|
### | | | | | | |_
### |_|_|_| |_|\__|
###
###############################################################################
lint-job:
stage: lint
ansible-lint-job:
stage: ansible-lint
script:
- echo "Running lint to check for linting violations"
- echo "Running ansible-lint to check for linting violations"
- ansible-lint -c ansible-lint.cfg
only:
- branches
@@ -45,650 +29,272 @@ lint-job:
tags:
- dind
###############################################################################
### https://patorjk.com/software/taag/#p=display&f=Doom&t=ansible-builder
### _ _ _ _ _ _ _
### (_) | | | | | (_) | | |
### __ _ _ __ ___ _| |__ | | ___ ______| |__ _ _ _| | __| | ___ _ __
### / _` | '_ \/ __| | '_ \| |/ _ \______| '_ \| | | | | |/ _` |/ _ \ '__|
### | (_| | | | \__ \ | |_) | | __/ | |_) | |_| | | | (_| | __/ |
### \__,_|_| |_|___/_|_.__/|_|\___| |_.__/ \__,_|_|_|\__,_|\___|_|
###
###############################################################################
.builder-job:
ansible-builder-job:
# A resource group ensures a job is mutually exclusive across different pipelines for the same project.
resource_group: deployment
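# For example (illustrative sketch, not part of this pipeline): two jobs that
# share a resource_group are serialized even across concurrent pipelines:
#
#   deploy-a:
#     resource_group: deployment
#     script: ["echo a"]
#   deploy-b:
#     resource_group: deployment
#     script: ["echo b"]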
stage: ansible-builder
before_script:
- echo "CI_ENVIRONMENT_NAME=$CI_ENVIRONMENT_NAME"
- cd ansible-builder
script:
- cp $SSH_KEY_GITLAB_CI_BUILDER_FILE ansible-builder/context/id_ed25519
- echo "Running ansible-builder to build awx execution environment"
- ansible-builder build -f ansible-builder/execution-environment.yml --tag ${CI_ENVIRONMENT_NAME}-${AWX_EE_DOCKER_IMAGE_EXTERN}:latest -c ansible-builder/context
- docker push ${CI_ENVIRONMENT_NAME}-${AWX_EE_DOCKER_IMAGE_EXTERN}:latest
except:
- schedules
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
.builder-job-devnso:
extends: .builder-job
environment: devnso
resource_group: devnso
before_script:
- export STAGE=devnso
- echo "Running ansible-build to build awx execution environment"
- ansible-builder build -v 3 --tag $AWX_EE_DOCKER_IMAGE_EXTERN:latest
- docker push $AWX_EE_DOCKER_IMAGE_EXTERN:latest
only:
refs:
- main
builder-job-devnso-automatic:
extends: .builder-job-devnso
only:
- master
changes:
- pip-requirements
- galaxy-requirements.yml
- ansible-builder/**/*
builder-job-devnso-manual:
extends: .builder-job-devnso
when: manual
.builder-job-prodnso:
extends: .builder-job
environment: prodnso
resource_group: prodnso
before_script:
- export STAGE=prodnso
only:
refs:
- prodnso
builder-job-prodnso-automatic:
extends: .builder-job-prodnso
only:
changes:
- pip-requirements
- galaxy-requirements.yml
- ansible-builder/**/*
builder-job-prodnso-manual:
extends: .builder-job-prodnso
when: manual
###############################################################################
# Environment specific AWX image from predefined variable $CI_ENVIRONMENT_NAME
.run-ansible:
image: $CI_ENVIRONMENT_NAME-$AWX_EE_DOCKER_IMAGE_EXTERN:latest
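# e.g. with CI_ENVIRONMENT_NAME=devnso and AWX_EE_DOCKER_IMAGE_EXTERN as defined
# above, this resolves to devnso-harbor-01.smardigo.digital/awx/awx-custom-ee:latest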
except:
- schedules
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
###############################################################################
### http://patorjk.com/software/taag/#p=display&f=Doom&t=setup.yml
### _ _
### | | | |
### ___ ___| |_ _ _ _ __ _ _ _ __ ___ | |
### / __|/ _ \ __| | | | '_ \| | | | '_ ` _ \| |
### \__ \ __/ |_| |_| | |_) | |_| | | | | | | |
### |___/\___|\__|\__,_| .__(_)__, |_| |_| |_|_|
###############################################################################
### https://patorjk.com/software/taag/#p=display&f=Doom&t=ansible%20-%20run
###
### _ _ _ _ _
### (_) | | | | | | |
### __ _ _ __ ___ _| |__ | | ___ ______ _ __ _ _ _ __ ______ ___ ___| |_ _ _ _ __ _ _ _ __ ___ | |
### / _` | '_ \/ __| | '_ \| |/ _ \ |______| | '__| | | | '_ \ |______| / __|/ _ \ __| | | | '_ \| | | | '_ ` _ \| |
### | (_| | | | \__ \ | |_) | | __/ | | | |_| | | | | \__ \ __/ |_| |_| | |_) | |_| | | | | | | |
### \__,_|_| |_|___/_|_.__/|_|\___| |_| \__,_|_| |_| |___/\___|\__|\__,_| .__(_)__, |_| |_| |_|_|
### | | __/ |
### |_| |___/
###
###############################################################################
.run-setup:
extends: .run-ansible
stage: run-setup
ansible-run-setup-1-dev:
image: $AWX_EE_DOCKER_IMAGE_EXTERN:latest
stage: ansible-run-setup
before_script:
- echo "CI_ENVIRONMENT_NAME=$CI_ENVIRONMENT_NAME"
script:
- "command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )"
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
- 'echo "$GITLAB_SSH_KEY" | tr -d "\r" | ssh-add -'
- mkdir -p ~/.ssh
- chmod 0700 ~/.ssh
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
- ssh-add -L
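# The lines above bootstrap SSH inside the container: start an agent, load the
# private key held in the $GITLAB_SSH_KEY CI variable (stripping carriage
# returns), and disable StrictHostKeyChecking; ssh-add -L merely logs the key.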
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml setup.yml --vault-password-file /tmp/vault-pass -t common -u gitlabci
script:
- echo "${ANSIBLE_VAULT_PASS_DEV}" > /tmp/vault-pass
- STAGE=dev && HETZNER_LABEL_SELECTOR="stage=${STAGE}" && ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml setup.yml --tags common --vault-password-file /tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
except:
- schedules
run-setup-devnso:
extends: .run-setup
environment: devnso
resource_group: devnso
before_script:
- export STAGE=devnso
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
only:
- main
run-setup-prodnso:
extends: .run-setup
environment: prodnso
resource_group: prodnso
before_script:
- export STAGE=prodnso
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
only:
- prodnso
- master
- schedules
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
resource_group: dev
run-setup-demompmx:
extends: .run-setup
environment: prodnso
resource_group: demompmx
ansible-run-setup-2-qa:
image: $AWX_EE_DOCKER_IMAGE_EXTERN:latest
stage: ansible-run-setup
before_script:
- export STAGE=demompmx
- echo "${ANSIBLE_VAULT_PASS_DEMOMPMX}" > /tmp/vault-pass
only:
- prodnso
###############################################################################
### http://patorjk.com/software/taag/#p=display&f=Doom&t=vpn.yml
### _
### | |
### __ ___ __ _ __ _ _ _ __ ___ | |
### \ \ / / '_ \| '_ \ | | | | '_ ` _ \| |
### \ V /| |_) | | | || |_| | | | | | | |
### \_/ | .__/|_| |_(_)__, |_| |_| |_|_|
### | | __/ |
### |_| |___/
###
###############################################################################
.vpn-config-update:
extends: .run-ansible
# A resource group ensures a job is mutually exclusive across different pipelines for the same project.
stage: run-update
script:
- "command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )"
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
- 'echo "$GITLAB_SSH_KEY" | tr -d "\r" | ssh-add -'
- mkdir -p ~/.ssh
- chmod 0700 ~/.ssh
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
- ssh-add -L
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml vpn.yml --vault-password-file /tmp/vault-pass -u gitlabci
script:
- echo "${ANSIBLE_VAULT_PASS_QA}" > /tmp/vault-pass
- STAGE=qa && HETZNER_LABEL_SELECTOR="stage=${STAGE}" && ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml setup.yml --tags common --vault-password-file /tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
only:
changes:
- usser/**/wireguard.yml
except:
- qa
- schedules
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
resource_group: qa
run-vpn-update-devnso:
extends: .vpn-config-update
environment: devnso
# A resource group ensures a job is mutually exclusive across different pipelines for the same project.
resource_group: devnso
before_script:
- export STAGE=devnso
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
only:
- main
run-vpn-update-prodnso:
extends: .vpn-config-update
environment: prodnso
# A resource group ensures a job is mutually exclusive across different pipelines for the same project.
resource_group: prodnso
ansible-run-setup-3-prodnso:
image: $AWX_EE_DOCKER_IMAGE_EXTERN:latest
stage: ansible-run-setup
before_script:
- export STAGE=prodnso
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
only:
- prodnso
###############################################################################
### http://patorjk.com/software/taag/#p=display&f=Doom&t=smardigo.yml
###
### _ _ _
### | (_) | |
### ___ _ __ ___ __ _ _ __ __| |_ __ _ ___ _ _ _ __ ___ | |
### / __| '_ ` _ \ / _` | '__/ _` | |/ _` |/ _ \| | | | '_ ` _ \| |
### \__ \ | | | | | (_| | | | (_| | | (_| | (_) | |_| | | | | | | |
### |___/_| |_| |_|\__,_|_| \__,_|_|\__, |\___(_)__, |_| |_| |_|_|
### __/ | __/ |
### |___/ |___/
###
###############################################################################
.run-management-update:
extends: .run-ansible
stage: run-update
script:
- "command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )"
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
- 'echo "$GITLAB_SSH_KEY" | tr -d "\r" | ssh-add -'
- mkdir -p ~/.ssh
- chmod 0700 ~/.ssh
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
- ssh-add -L
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- ansible-playbook -i stage-$STAGE smardigo.yml --vault-password-file=/tmp/vault-pass -l management -t update_configurations -u gitlabci
script:
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
- STAGE=prodnso && HETZNER_LABEL_SELECTOR="stage=${STAGE}" && ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml setup.yml --tags common --vault-password-file /tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
only:
changes:
- smardigo/**/*
except:
- prodnso
- schedules
run-management-update-devnso:
extends: .run-management-update
environment: devnso
resource_group: devnso
before_script:
- export STAGE=devnso
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
only:
- main
run-management-update-prodnso:
extends: .run-management-update
environment: prodnso
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
resource_group: prodnso
before_script:
- export STAGE=prodnso
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
only:
- prodnso
run-management-update-demompmx:
extends: .run-management-update
environment: prodnso
resource_group: demompmx
before_script:
- export STAGE=demompmx
- echo "${ANSIBLE_VAULT_PASS_DEMOMPMX}" > /tmp/vault-pass
only:
- prodnso
###############################################################################
### http://patorjk.com/software/taag/#p=display&f=Doom&t=patchday.yml
### _ _ _ _
### | | | | | | | |
### _ __ __ _| |_ ___| |__ __| | __ _ _ _ _ _ _ __ ___ | |
### | '_ \ / _` | __/ __| '_ \ / _` |/ _` | | | || | | | '_ ` _ \| |
### | |_) | (_| | || (__| | | | (_| | (_| | |_| || |_| | | | | | | |
### | .__/ \__,_|\__\___|_| |_|\__,_|\__,_|\__, (_)__, |_| |_| |_|_|
### | | __/ | __/ |
### |_| |___/ |___/
###############################################################################
### https://patorjk.com/software/taag/#p=display&f=Doom&t=ansible%20-%20run
###
###############################################################################
### _ _ _ _ _ _ _
### (_) | | | | | | | | | | |
### __ _ _ __ ___ _| |__ | | ___ ______ _ __ _ _ _ __ ______ | | ___ _| |__ ___ _ __ _ __ ___| |_ ___ ___ _ _ _ __ ___ | |
### / _` | '_ \/ __| | '_ \| |/ _ \ |______| | '__| | | | '_ \ |______| | |/ / | | | '_ \ / _ \ '__| '_ \ / _ \ __/ _ \/ __|| | | | '_ ` _ \| |
### | (_| | | | \__ \ | |_) | | __/ | | | |_| | | | | | <| |_| | |_) | __/ | | | | | __/ || __/\__ \| |_| | | | | | | |
### \__,_|_| |_|___/_|_.__/|_|\___| |_| \__,_|_| |_| |_|\_\\__,_|_.__/ \___|_| |_| |_|\___|\__\___||___(_)__, |_| |_| |_|_|
### __/ |
### |___/
.run-patchday:
extends: .run-ansible
ansible-run-kubernetes-1-dev:
image: $AWX_EE_DOCKER_IMAGE_EXTERN:latest
stage: ansible-run-kubernetes
before_script:
- "command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )"
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
- 'echo "$GITLAB_SSH_KEY" | tr -d "\r" | ssh-add -'
- mkdir -p ~/.ssh
- chmod 0700 ~/.ssh
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
- ssh-add -L
timeout: 2h
run-patchday-harbor-devnso:
extends: .run-patchday
environment: devnso
stage: run-patchday-harbor
resource_group: devnso
script:
- export STAGE=devnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'harbor'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "main"
run-patchday-elastic-devnso:
extends: .run-patchday
environment: devnso
stage: run-patchday-elastic
resource_group: devnso
script:
- export STAGE=devnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'elastic'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "main"
when: manual # Disable automatic execution of this Stage. We have no Elastic hosts in devnso currently.
run-patchday-database-postgres-devnso:
extends: .run-patchday
environment: devnso
stage: run-patchday-database
resource_group: devnso
script:
- export STAGE=devnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'postgres'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "main"
run-patchday-database-maria-devnso:
extends: .run-patchday
environment: devnso
stage: run-patchday-database
resource_group: devnso
script:
- export STAGE=devnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'maria'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "main"
run-patchday-all-devnso:
extends: .run-patchday
environment: devnso
stage: run-patchday-all
resource_group: devnso
script:
- export STAGE=devnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'all:!harbor:!elastic:!postgres:!maria:!k8s_cluster'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "main"
run-patchday-all-k8s-devnso:
extends: .run-patchday
environment: devnso
stage: run-patchday-all
resource_group: devnso
script:
- export STAGE=devnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'k8s_cluster'
- echo "${ANSIBLE_VAULT_PASS_DEV}" > /tmp/vault-pass
- STAGE=dev && HETZNER_LABEL_SELECTOR="stage=${STAGE}" && ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml kubernetes.yml --vault-password-file /tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "main"
run-patchday-harbor-prodnso:
extends: .run-patchday
environment: prodnso
stage: run-patchday-harbor
resource_group: prodnso
script:
- export STAGE=prodnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'harbor'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
run-patchday-elastic-prodnso:
extends: .run-patchday
environment: prodnso
stage: run-patchday-elastic
resource_group: prodnso
script:
- export STAGE=prodnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'elastic'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
when: manual # Disable automatic execution of this Stage. We have no Elastic hosts in devnso currently.
run-patchday-database-postgres-prodnso:
extends: .run-patchday
environment: prodnso
stage: run-patchday-database
resource_group: prodnso
script:
- export STAGE=prodnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'postgres'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
only:
- master
- schedules
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
resource_group: dev
run-patchday-database-maria-prodnso:
extends: .run-patchday
environment: prodnso
stage: run-patchday-database
resource_group: prodnso
ansible-run-kubernetes-2-qa:
image: $AWX_EE_DOCKER_IMAGE_EXTERN:latest
stage: ansible-run-kubernetes
before_script:
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
- 'echo "$GITLAB_SSH_KEY" | tr -d "\r" | ssh-add -'
- mkdir -p ~/.ssh
- chmod 0700 ~/.ssh
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
script:
- export STAGE=prodnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'maria'
- echo "${ANSIBLE_VAULT_PASS_QA}" > /tmp/vault-pass
- STAGE=qa && HETZNER_LABEL_SELECTOR="stage=${STAGE}" && ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml kubernetes.yml --vault-password-file /tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
only:
- qa
- schedules
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
resource_group: qa
run-patchday-all-prodnso:
extends: .run-patchday
environment: prodnso
stage: run-patchday-all
resource_group: prodnso
ansible-run-kubernetes-3-prodnso:
image: $AWX_EE_DOCKER_IMAGE_EXTERN:latest
stage: ansible-run-kubernetes
before_script:
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
- 'echo "$GITLAB_SSH_KEY" | tr -d "\r" | ssh-add -'
- mkdir -p ~/.ssh
- chmod 0700 ~/.ssh
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
script:
- export STAGE=prodnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'all:!harbor:!elastic:!postgres:!maria:!k8s_cluster'
- STAGE=prodnso && HETZNER_LABEL_SELECTOR="stage=${STAGE}" && ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml kubernetes.yml --vault-password-file /tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
run-patchday-all-k8s-prodnso:
extends: .run-patchday
environment: prodnso
stage: run-patchday-all
only:
- prodnso
- schedules
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
resource_group: prodnso
script:
- export STAGE=prodnso
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'k8s_cluster'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
run-patchday-database-postgres-demompmx:
extends: .run-patchday
environment: prodnso
stage: run-patchday-database
resource_group: demompmx
script:
- export STAGE=demompmx
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEMOMPMX}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'postgres'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
run-patchday-database-maria-demompmx:
extends: .run-patchday
environment: prodnso
stage: run-patchday-database
resource_group: demompmx
script:
- export STAGE=demompmx
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEMOMPMX}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'maria'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
run-patchday-all-demompmx:
extends: .run-patchday
environment: prodnso
stage: run-patchday-all
resource_group: demompmx
script:
- export STAGE=demompmx
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEMOMPMX}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'all:!postgres:!maria:!k8s_cluster'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
run-patchday-all-k8s-demompmx:
extends: .run-patchday
environment: prodnso
stage: run-patchday-all
resource_group: demompmx
script:
- export STAGE=demompmx
- export HETZNER_LABEL_SELECTOR="stage=${STAGE}"
- echo "${ANSIBLE_VAULT_PASS_DEMOMPMX}" > /tmp/vault-pass
- ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci --limit 'k8s_cluster'
after_script:
- rm /tmp/vault-pass
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "prodnso"
###############################################################################
### http://patorjk.com/software/taag/#p=display&f=Doom&t=hcloud-firewall.yml
###
### _ _ _ __ _ _ _ _
### | | | | | | / _(_) | | | | |
### | |__ ___| | ___ _ _ __| |______| |_ _ _ __ _____ ____ _| | | _ _ _ __ ___ | |
### | '_ \ / __| |/ _ \| | | |/ _` |______| _| | '__/ _ \ \ /\ / / _` | | || | | | '_ ` _ \| |
### | | | | (__| | (_) | |_| | (_| | | | | | | | __/\ V V / (_| | | || |_| | | | | | | |
### |_| |_|\___|_|\___/ \__,_|\__,_| |_| |_|_| \___| \_/\_/ \__,_|_|_(_)__, |_| |_| |_|_|
### __/ |
### |___/
###############################################################################
### https://patorjk.com/software/taag/#p=display&f=Doom&t=patchday
### _ _ _
### | | | | | |
### _ __ __ _| |_ ___| |__ __| | __ _ _ _
### | '_ \ / _` | __/ __| '_ \ / _` |/ _` | | | |
### | |_) | (_| | || (__| | | | (_| | (_| | |_| |
### | .__/ \__,_|\__\___|_| |_|\__,_|\__,_|\__, |
### | | __/ |
### |_| |___/
###
###############################################################################
.run-hcloud-firewall:
extends: .run-ansible
stage: run-hcloud-firewall
ansible-patchday-1-dev:
image: $AWX_EE_DOCKER_IMAGE_EXTERN:latest
stage: ansible-patchday
before_script:
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
- 'echo "$GITLAB_SSH_KEY" | tr -d "\r" | ssh-add -'
- mkdir -p ~/.ssh
- chmod 0700 ~/.ssh
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
script:
- ansible-playbook -e "stage=${STAGE}" hcloud_firewall.yml --vault-password-file /tmp/vault-pass
- echo "${ANSIBLE_VAULT_PASS_DEV}" > /tmp/vault-pass
- STAGE=dev && HETZNER_LABEL_SELECTOR="stage=${STAGE}" && ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
except:
- schedules
run-hcloud-firewall-devnso:
extends: .run-hcloud-firewall
environment: devnso
resource_group: devnso
before_script:
- export STAGE=devnso
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
only:
- main
run-hcloud-firewall-prodnso:
extends: .run-hcloud-firewall
environment: prodnso
resource_group: prodnso
before_script:
- export STAGE=prodnso
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
only:
- prodnso
run-hcloud-firewall-demompmx:
extends: .run-hcloud-firewall
environment: prodnso
resource_group: demompmx
before_script:
- export STAGE=demompmx
- echo "${ANSIBLE_VAULT_PASS_DEMOMPMX}" > /tmp/vault-pass
when: manual
only:
- prodnso
###############################################################################
### http://patorjk.com/software/taag/#p=display&f=Doom&t=Digitialocean
###
### ______ _ _ _ _ _
### | _ (_) (_) | (_) | |
### | | | |_ __ _ _| |_ _ __ _| | ___ ___ ___ __ _ _ __
### | | | | |/ _` | | __| |/ _` | |/ _ \ / __/ _ \/ _` | '_ \
### | |/ /| | (_| | | |_| | (_| | | (_) | (_| __/ (_| | | | |
### |___/ |_|\__, |_|\__|_|\__,_|_|\___/ \___\___|\__,_|_| |_|
### __/ |
### |___/
###
###############################################################################
- master
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
resource_group: dev
run-setup-digitalocean:
extends: .run-ansible
environment: devnso
stage: run-setup
ansible-patchday-2-qa:
image: $AWX_EE_DOCKER_IMAGE_EXTERN:latest
stage: ansible-patchday
before_script:
- export STAGE=devnso
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
script:
- "command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )"
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
- 'echo "$GITLAB_SSH_KEY" | tr -d "\r" | ssh-add -'
- mkdir -p ~/.ssh
- chmod 0700 ~/.ssh
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
- ssh-add -L
- ansible-playbook -i stage-digitalocean setup.yml --vault-password-file /tmp/vault-pass -t common -u gitlabci
- ansible-playbook -i stage-digitalocean smardigo.yml --vault-password-file /tmp/vault-pass -u gitlabci
script:
- echo "${ANSIBLE_VAULT_PASS_QA}" > /tmp/vault-pass
- STAGE=qa && HETZNER_LABEL_SELECTOR="stage=${STAGE}" && ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
when: manual
only:
- main
except:
- qa
- schedules
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
resource_group: qa
run-patchday-devnso-digitalocean:
extends: .run-ansible
environment: devnso
stage: run-patchday-all
ansible-patchday-3-prodnso:
image: $AWX_EE_DOCKER_IMAGE_EXTERN:latest
stage: ansible-patchday
before_script:
- echo "${ANSIBLE_VAULT_PASS_DEVNSO}" > /tmp/vault-pass
script:
- "command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )"
- 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
- eval $(ssh-agent -s)
- 'echo "$GITLAB_SSH_KEY" | tr -d "\r" | ssh-add -'
- mkdir -p ~/.ssh
- chmod 0700 ~/.ssh
- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" >> ~/.ssh/config'
- ssh-add -L
- ansible-playbook -i stage-digitalocean patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci
script:
- echo "${ANSIBLE_VAULT_PASS_PRODNSO}" > /tmp/vault-pass
- STAGE=prodnso && HETZNER_LABEL_SELECTOR="stage=${STAGE}" && ansible-playbook -i stage-${STAGE}-netgo-hcloud.yml patchday.yml --vault-password-file=/tmp/vault-pass -u gitlabci
after_script:
- rm /tmp/vault-pass
timeout: 2h
rules:
- if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == "main"
when: manual
only:
- prodnso
- schedules
tags:
- dind
- harbor # 05.02.22 TODO some runners run into timeouts
resource_group: prodnso

.gitmodules

@@ -1,4 +1,4 @@
[submodule "kubespray"]
path = kubespray
url = https://github.com/kubernetes-sigs/kubespray.git
branch = release-2.21
branch = v2.18.0

@@ -6,7 +6,7 @@
## Install needed ansible collections / roles
ansible-galaxy install -r galaxy-requirements.yml -f
ansible-galaxy install -r galaxy-requirements.yml
# Setup
Create/Start servers for stage-dev
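For example (a sketch mirroring the CI job above; vault file and remote user may differ):
ansible-playbook -i stage-dev-netgo-hcloud.yml setup.yml --tags common --vault-password-file ~/.vault-pass -u gitlabci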
@@ -91,6 +91,10 @@ if everything works fine, plz push the created docker container with:
# TODO
IPFire
149.233.6.129 - eShelter
212.121.131.106 - Siemensdamm
Prometheus (Grafana)
docker exec -i dev-prometheus-01-grafana sh -c 'grafana-cli plugins install grafana-piechart-panel'
docker restart dev-prometheus-01-grafana

@@ -1,5 +1,6 @@
# Execution Environment for AWX
ansible-builder build --tag $CI_ENVIRONMENT_NAME-harbor-01.smardigo.digital/awx/awx-custom-ee:latest
docker login $CI_ENVIRONMENT_NAME-harbor-01.smardigo.digital
docker push $CI_ENVIRONMENT_NAME-harbor-01.smardigo.digital/awx/awx-custom-ee:latest
ansible-builder build --tag dev-harbor-01.smardigo.digital/awx/awx-custom-ee:latest
docker login dev-harbor-01.smardigo.digital
docker tag XXXXXXXX dev-harbor-01.smardigo.digital/awx/awx-custom-ee:latest
docker push dev-harbor-01.smardigo.digital/awx/awx-custom-ee

@@ -1,5 +1,4 @@
git-core [platform:rpm]
python38-devel [platform:rpm compile]
subversion [platform:rpm]
subversion [platform:dpkg]
git-lfs [platform:rpm]
epel-release [platform:rpm]
tar [platform:rpm]

@@ -1,2 +1 @@
/_build/
Dockerfile

@@ -0,0 +1,36 @@
ARG EE_BASE_IMAGE=quay.io/ansible/ansible-runner:latest
ARG EE_BUILDER_IMAGE=quay.io/ansible/ansible-builder:latest
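# Three-stage ansible-builder layout: "galaxy" installs roles/collections,
# "builder" introspects python/system dependencies and assembles wheels,
# and the final stage installs both results on top of the base image.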
FROM $EE_BASE_IMAGE as galaxy
ARG ANSIBLE_GALAXY_CLI_COLLECTION_OPTS=
USER root
ADD _build /build
WORKDIR /build
RUN ansible-galaxy role install -r requirements.yml --roles-path /usr/share/ansible/roles
RUN ansible-galaxy collection install $ANSIBLE_GALAXY_CLI_COLLECTION_OPTS -r requirements.yml --collections-path /usr/share/ansible/collections
FROM $EE_BUILDER_IMAGE as builder
COPY --from=galaxy /usr/share/ansible /usr/share/ansible
ADD _build/requirements.txt requirements.txt
ADD _build/bindep.txt bindep.txt
RUN ansible-builder introspect --sanitize --user-pip=requirements.txt --user-bindep=bindep.txt --write-bindep=/tmp/src/bindep.txt --write-pip=/tmp/src/requirements.txt
RUN assemble
FROM $EE_BASE_IMAGE
USER root
COPY --from=galaxy /usr/share/ansible /usr/share/ansible
COPY --from=builder /output/ /output/
RUN /output/install-from-bindep && rm -rf /output/wheels
RUN alternatives --set python /usr/bin/python3
COPY --from=quay.io/project-receptor/receptor:0.9.7 /usr/bin/receptor /usr/bin/receptor
RUN mkdir -p /var/run/receptor
ADD run.sh /run.sh
CMD /run.sh
USER 1000
RUN git lfs install

@@ -0,0 +1,2 @@
#! /bin/bash
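# Entrypoint of the execution environment image (wired up as CMD /run.sh in the
# Dockerfile above): start an ansible-runner worker against /runner.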
ansible-runner worker --private-data-dir=/runner

@@ -1,44 +1,16 @@
---
version: 3
build_arg_defaults:
ANSIBLE_GALAXY_CLI_COLLECTION_OPTS: "--pre"
version: 1
dependencies:
ansible_core:
package_pip: ansible-core~=2.12
ansible_runner:
package_pip: ansible-runner
galaxy: ../galaxy-requirements.yml
python: ../pip-requirements
python_interpreter:
package_system: "python311"
python_path: "/usr/bin/python3.11"
system: bindep.txt
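# i.e. ansible-builder resolves collections/roles from ../galaxy-requirements.yml,
# python packages from ../pip-requirements and system packages from bindep.txt.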
images:
base_image:
name: quay.io/centos/centos:stream9-minimal
additional_build_files:
- src: ../ansible.cfg
dest: configs
options:
package_manager_path: /usr/bin/microdnf
additional_build_steps:
append_base:
- RUN $PYCMD -m pip install -U pip
prepend_galaxy:
- ADD _build/configs/ansible.cfg ~/.ansible.cfg
- RUN microdnf install -y git-lfs openssh-clients
- RUN mkdir -p /root/.ssh
- ADD id_ed25519 /root/.ssh/id_ed25519
- RUN chmod -R 700 /root/.ssh
- RUN ssh-keyscan git.dev-at.de >> /root/.ssh/known_hosts
- RUN eval $(ssh-agent) && ssh-add /root/.ssh/id_ed25519
append_final:
- COPY --from=quay.io/ansible/receptor:devel /usr/bin/receptor /usr/bin/receptor
append:
- RUN alternatives --set python /usr/bin/python3
- COPY --from=quay.io/project-receptor/receptor:0.9.7 /usr/bin/receptor /usr/bin/receptor
- RUN mkdir -p /var/run/receptor
- RUN git lfs install --system
- ADD run.sh /run.sh
- CMD /run.sh
- USER 1000
- RUN git lfs install

@@ -1,4 +1,3 @@
exclude_paths:
- .ansible/
- test*.yml
- pmci-*.yml

@@ -1,9 +1,8 @@
[defaults]
pipelining = True
host_key_checking = False
inventory_plugins = inventory_plugins
callbacks_enabled = profile_tasks
inventory_plugins = ./inventory_plugins
callbacks_enabled = timer
interpreter_python = auto_silent
log_path=last_ansible_run
forks = 30
ssh_args = -o ServerAliveInterval=10

@@ -1,31 +0,0 @@
---
# configuring awx cluster
- name: 'apply awx config update to {{ host | default("all") }}'
hosts: '{{ host | default("kube_control_plane") }}'
serial: "{{ serial_number | default(10) }}"
vars:
ansible_ssh_host: "{{ stage_server_domain }}"
pre_tasks:
- name: "Check if ansible version is at least {{ ansible_minimal_version }}"
assert:
that:
- ansible_version.string is version(ansible_minimal_version, ">=")
msg: "The ansible version has to be at least {{ ansible_minimal_version }}"
tags:
- always
- name: "Import autodiscover pre-tasks"
import_tasks: tasks/autodiscover_pre_tasks.yml
become: false
tags:
- always
roles:
- role: kubernetes/awx
when: kubernetes_with_awx | default(false)
tags:
- never # shouldn't be done automatically due to removal logic
- update_awx_config

@@ -0,0 +1,101 @@
---
# creates database backup
# - postgres
# - executed on stage specific server: {{ stage }}-postgres-01
# - creates database backup for specific database
# Parameters:
# playbook inventory
# stage := the name of the stage (e.g. dev, int, qa, prod)
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster)
# cluster_service := (service to setup, e.g. 'connect', ...)
# cluster_features := (optional features to use, e.g. ['wordpress', 'resubmission', ...])
# custom_backup_name := defines a substring for backup file => {{ stage }}_{{ tenant_id }}_{{ cluster_name }}_{{ cluster_service }}__gehtdichnixan.sql
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
# smardigo_management_action := (smardigo management action name of the management process)
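# Example invocation (values and the playbook filename are hypothetical):
#   ansible-playbook pmci-backup-database.yml -e stage=dev -e tenant_id=acme \
#     -e cluster_name=shop -e cluster_service=connect \
#     -e '{"cluster_features": ["connect"]}' -e custom_backup_name=pre-upgrade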
#############################################################
# Creating inventory dynamically for given parameters
#############################################################
- hosts: localhost
connection: local
gather_facts: false
pre_tasks:
- name: "Check if ansible version is at least 2.10.x"
assert:
that:
- ansible_version.major >= 2
- ansible_version.minor >= 10
msg: "The ansible version has to be at least 2.10 (found {{ ansible_version.full }})"
# add virtual server to load stage specific variables as context
- name: "Add <{{ stage }}-virtual-host-to-read-groups-vars> to hosts"
add_host:
name: "{{ stage }}-virtual-host-to-read-groups-vars"
groups:
- "stage_{{ stage }}"
changed_when: False
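# The "virtual" host is never reached over SSH; it exists only so the
# group_vars of stage_{{ stage }} get loaded, and the plays below exclude it
# again via the :!{{ stage }}-virtual-host-to-read-groups-vars host pattern.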
tasks:
- name: "Add postgres servers to hosts if necessary"
add_host:
name: "{{ stage }}-postgres-01"
groups:
- "stage_{{ stage }}"
- "{{ item }}"
changed_when: False
with_items: "{{ cluster_features }}"
when: item in ['connect', 'management_connect', 'keycloak', 'webdav', 'gitea', 'workflow_index', 'workflow_proxy', 'pdns']
#############################################################
# Creating database backups for created inventory
#############################################################
- hosts: "stage_{{ stage }}:!{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
remote_user: root
vars:
postgres_backup_state: dump
ansible_ssh_host: "{{ stage_server_domain }}"
roles:
- role: connect_postgres
when: "'connect' in group_names"
- role: gitea_postgres
when: "'gitea' in group_names"
- role: keycloak_postgres
when: "'keycloak' in group_names"
- role: webdav_postgres
when: "'webdav' in group_names"
- role: workflow_index_postgres
when: "'workflow_index' in group_names"
- role: workflow_proxy_postgres
when: "'workflow_proxy' in group_names"
#############################################################
# Sending smardigo management message to process
#############################################################
- hosts: "{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
connection: local
run_once: true
vars:
connect_jwt_username: "{{ management_admin_username }}"
tasks:
- name: "Sending smardigo management message to <{{ smardigo_management_url }}>"
include_tasks: tasks/smardigo_management_message.yml

@@ -0,0 +1,136 @@
---
# creates databases on shared service servers
# - postgres
# - executed on stage specific server: {{ stage }}-postgres-01
# - creates databases to work with connect: {{ connect_postgres_database }}
# - creates databases to work with pdns: {{ pdns_postgres_database }}
# - creates databases to work with management connect: {{ management_connect_postgres_database }}
# - creates databases to work with shared webdav: {{ webdav_postgres_database }}
# - creates databases to work with shared keycloak: {{ keycloak_postgres_database }}
# - maria
# - executed on stage specific server: {{ stage }}-maria-01
# - creates databases to work with connect wordpress: {{ connect_wordpress_maria_database }}
# Parameters:
# playbook inventory
# stage := the name of the stage (e.g. dev, int, qa, prod)
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster)
# cluster_service := (service to setup, e.g. 'connect', ...)
# cluster_features := (optional features to use, e.g. ['wordpress', 'resubmission', ...])
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
# smardigo_management_action := (smardigo management action name of the management process)
#############################################################
# Creating inventory dynamically for given parameters
#############################################################
- hosts: localhost
gather_facts: false
connection: local
pre_tasks:
- name: "Check if ansible version is at least 2.10.x"
assert:
that:
- ansible_version.major >= 2
- ansible_version.minor >= 10
msg: "The ansible version has to be at least 2.10 (found {{ ansible_version.full }})"
# add virtual server to load stage specific variables as context
- name: "Add <{{ stage }}-virtual-host-to-read-groups-vars> to hosts"
add_host:
name: "{{ stage }}-virtual-host-to-read-groups-vars"
groups:
- "stage_{{ stage }}"
changed_when: False
tasks:
- name: "Add postgres servers to hosts if necessary"
add_host:
name: "{{ stage }}-postgres-01"
groups:
- "stage_{{ stage }}"
- "{{ item }}"
changed_when: False
with_items: "{{ cluster_features }}"
when: item in ['connect', 'management_connect', 'keycloak', 'webdav', 'gitea', 'workflow_index', 'workflow_proxy', 'pdns']
- name: "Add maria servers to hosts if necessary"
add_host:
name: "{{ stage }}-maria-01"
groups:
- "stage_{{ stage }}"
- "{{ item }}"
changed_when: False
with_items: "{{ cluster_features }}"
when: item in ['connect_wordpress']
#############################################################
# Creating databases for created inventory
#############################################################
- hosts: "stage_{{ stage }}:!{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
remote_user: root
vars:
ansible_ssh_host: "{{ stage_server_domain }}"
pre_tasks:
- name: "Import autodiscover pre-tasks"
import_tasks: tasks/autodiscover_pre_tasks.yml
become: false
tags:
- always
roles:
- role: connect_postgres
when: "'connect' in group_names"
- role: pdns_postgres
vars:
initialize: True
when: "'pdns' in group_names"
- role: pdns_admin_postgres
vars:
initialize: True
when: "'pdns' in group_names"
- role: gitea_postgres
when: "'gitea' in group_names"
- role: keycloak_postgres
when: "'keycloak' in group_names"
- role: webdav_postgres
when: "'webdav' in group_names"
- role: workflow_index_postgres
when: "'workflow_index' in group_names"
- role: workflow_proxy_postgres
when: "'workflow_proxy' in group_names"
- role: connect_wordpress_maria
when: "'connect_wordpress' in group_names"
#############################################################
# Sending smardigo management message to process
#############################################################
- hosts: "{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
connection: local
run_once: true
vars:
connect_jwt_username: "{{ management_admin_username }}"
tasks:
- name: "Sending smardigo management message to <{{ smardigo_management_url }}>"
include_tasks: tasks/smardigo_management_message.yml

@@ -1,14 +1,18 @@
---
# creates elastic objects for smardigo instances
# to empower dudes to find relevant log messages faster and
# reduce/abolish "monkey business" in creating needed ES-related objects for (devops|admin)-dudes
# - executed on stage specific server: {{ stage }}-elastic-stack-kibana-01-kibana
# Parameters:
# playbook inventory
# stage := the name of the stage (e.g. devnso, qanso, prodnso)
# tenant := object with tenant related data
# key :=
# name :=
# cluster := object with cluster specific data (optional)
# ...
# data := object with action specific data (optional)
# ...
# stage := the name of the stage (e.g. dev, int, qa, prod)
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster)
# cluster_service := (service to setup, e.g. 'connect', ...)
# cluster_features := (optional features to use, e.g. ['wordpress', 'resubmission', ...])
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
@@ -18,22 +22,46 @@
# Creating inventory dynamically for given parameters
#############################################################
- import_playbook: pmci-inventory-cluster.yml
- hosts: localhost
gather_facts: false
connection: local
pre_tasks:
- name: "Check if ansible version is at least 2.10.x"
assert:
that:
- ansible_version.major >= 2
- ansible_version.minor >= 10
msg: "The ansible version has to be at least 2.10 (found {{ ansible_version.full }})"
# add virtual server to load stage specific variables as context
- name: "Add <{{ stage }}-virtual-host-to-read-groups-vars> to hosts"
add_host:
name: "{{ stage }}-virtual-host-to-read-groups-vars"
groups:
- "stage_{{ stage }}"
changed_when: False
tasks:
- name: Add hosts
add_host:
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ '%02d' | format(item|int) }}"
groups: "{{ ['stage_' + stage ] + [cluster_service] + cluster_features }}"
with_sequence: start=1 end={{ cluster_size | default(1) }}
changed_when: False
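# e.g. stage=dev, tenant_id=acme, cluster_name=shop, cluster_size=2 (all values
# hypothetical) yields the hosts dev-acme-shop-01 and dev-acme-shop-02.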
#############################################################
# Running the PMCI roles
# Creating kibana search objects for created inventory
#############################################################
- hosts: "pmci_server_selector"
- hosts: "stage_{{ stage }}:!{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
gather_facts: no
remote_user: root
vars:
ansible_connection: local
ansible_ssh_host: "{{ stage_server_domain }}"
tenant_id: "{{ tenant.key }}" # legacy parameter, backwards compatibility
cluster_name: "{{ cluster.key }}" # legacy parameter, backwards compatibility
cluster_service: "{{ cluster.service }}"
api_endpoint: '{{ stage }}-elastic-stack-kibana-01-kibana.{{ domain }}'
elastic_state: present
elastic_users:
-
@@ -67,8 +95,6 @@
- all
dashboard:
- all
dev_tools:
- all
discover:
- all
indexPatterns:
@@ -103,57 +129,12 @@
- actions
- osquery
- savedObjectsTagging
tasks:
- name: "Create Index Pattern for <{{ stage }}-{{ tenant_id }}-*>"
set_fact:
es_index_pattern_tenant: '{{ stage }}-{{ tenant_id }}-*'
elastic_index_patterns:
- id: "{{ stage }}-{{ tenant_id }}-*"
name: "{{ stage }}-{{ tenant_id }}"
search_name: "{{ stage }}-{{ tenant_id }}-*"
dashboard_name: "{{ stage }}-{{ tenant_id }}-*"
with_container_filter: false
when:
- cluster_name is not defined
- cluster_service is not defined
- name: "Create Index Pattern for <{{ stage }}-{{ tenant_id }}-{{ cluster_name }}>"
set_fact:
es_index_pattern_tenant: '{{ stage }}-{{ tenant_id }}-*'
elastic_index_patterns:
- id: "{{ stage }}-{{ tenant_id }}-*"
name: "{{ stage }}-{{ tenant_id }}"
search_name: "{{ stage }}-{{ tenant_id }}-*"
dashboard_name: "{{ stage }}-{{ tenant_id }}-*"
- id: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-*"
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}"
search_name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-*"
dashboard_name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-*"
when:
- cluster_name is defined
- cluster_service is not defined
- name: "Create Index Pattern for <{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ cluster_service }}>"
set_fact:
es_index_pattern_tenant: ''
elastic_index_patterns:
- id: "{{ stage }}-{{ tenant_id }}-*"
name: "{{ stage }}-{{ tenant_id }}"
search_name: "{{ stage }}-{{ tenant_id }}-*"
dashboard_name: "{{ stage }}-{{ tenant_id }}-*"
- id: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-*"
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}"
search_name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-*"
dashboard_name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-*"
es_index_pattern_tenant: '{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-*'
es_index_pattern_services:
- id: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-*-{{ cluster_service }}-*"
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ cluster_service }}"
search_name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ cluster_service }}-*"
dashboard_name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ cluster_service }}-*"
when:
- cluster_service is defined
- cluster_name is defined
name: "{{ cluster_service }}"
tasks:
- name: "Do some stuff in elastic with spaces ... "
include_role:
name: kibana
@@ -203,12 +184,22 @@
vars:
es_space: *es_space_name
es_indexpattern_title: '{{ es_indexpattern_name }}'
es_panel_uuid: "{{ 'panel_' + elastic_dashboard_name | to_uuid }}"
es_index_pattern_tenant_uuid: '{{ es_index_pattern_tenant | to_uuid }}'
es_panel_uuid: "{{ 'panel_' + es_dashboard_name | to_uuid }}"
tags:
- es-importobjects
#############################################################
# Sending smardigo management message to process
#############################################################
- hosts: "{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
connection: local
run_once: true
vars:
connect_jwt_username: "{{ management_admin_username }}"
- import_playbook: pmci-callback.yml
tasks:
- name: "Sending smardigo management message to <{{ smardigo_management_url }}>"
include_tasks: tasks/smardigo_management_message.yml

@@ -0,0 +1,98 @@
---
# creates realm/clients on shared keycloak service
# - connect_realm: configuration to use with connect/wordpress
# Parameters:
# playbook inventory
# stage := the name of the stage (e.g. dev, int, qa, prod)
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department )
# cluster_size := (WIP node count for the cluster)
# cluster_service := (service to setup, e.g. 'connect', ...)
# cluster_features := (optional features to use, e.g. ['wordpress', 'resubmission', ...])
# playbook roles (keycloak / oidc)
# current_realm_name :=
# current_realm_display_name :=
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
# smardigo_management_action := (smardigo management action name of the management process)
#############################################################
# Creating inventory dynamically for given parameters
#############################################################
- hosts: localhost
gather_facts: false
connection: local
pre_tasks:
- name: "Check if ansible version is at least 2.10.x"
assert:
that:
- ansible_version.major >= 2
- ansible_version.minor >= 10
msg: "The ansible version has to be at least 2.10 (found {{ ansible_version.full }})"
# add virtual server to load stage specific variables as context
- name: "Add <{{ stage }}-virtual-host-to-read-groups-vars> to hosts"
add_host:
name: "{{ stage }}-virtual-host-to-read-groups-vars"
groups:
- "stage_{{ stage }}"
changed_when: False
tasks:
- name: Add hosts
add_host:
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ '%02d' | format(item|int) }}"
groups: "{{ ['stage_' + stage ] + [cluster_service] + cluster_features }}"
with_sequence: start=1 end={{ cluster_size | default(1) }}
changed_when: False
#############################################################
# Creating realms for created inventory
#############################################################
- hosts: "stage_{{ stage }}:!{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
connection: local
collections:
- hetzner.hcloud
- community.general
pre_tasks:
- name: "Import autodiscover pre-tasks"
import_tasks: tasks/autodiscover_pre_tasks.yml
become: false
tags:
- always
roles:
- role: connect_realm
when: '"connect" in group_names'
- role: gitea_realm
when: '"gitea" in group_names'
- role: workflow_proxy_realm
when: '"workflow-proxy" in group_names'
#############################################################
# Sending smardigo management message to process
#############################################################
- hosts: "{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
connection: local
run_once: true
vars:
connect_jwt_username: "{{ management_admin_username }}"
tasks:
- name: "Sending smardigo management message to <{{ smardigo_management_url }}>"
include_tasks: tasks/smardigo_management_message.yml

@@ -1,25 +1,16 @@
---
# ****** ** ****** ** ** ** ** ******* #
# /*////** **** **////** /** ** /** /** /**////** #
# /* /** **//** ** // /** ** /** /** /** /** #
# /****** ** //** /** /**** /** /** /******* #
# /*//// ** ********** /** /**/** /** /** /**//// #
# /* /** /**//////** //** ** /**//** /** /** /** #
# /******* /** /** //****** /** //** //******* /** #
# /////// // // ////// // // /////// // #
# creates remote database backup
# - postgres
# - executed on stage specific server: {{ shared_service_postgres_secondary }} (currently: slave)
# - executed on stage specific server: {{ stage }}-postgres-02 (currently: slave)
# - creates database backup for ALL databases in postgres-server
# - mariadb
# - executed on stage specific server: {{ shared_service_maria_primary }}
# - executed on stage specific server: {{ stage }}-maria-01
# - creates database backup for ALL databases in mariadb-server
# Parameters:
# playbook inventory
# stage := the name of the stage (e.g. devnso, qanso, prodnso)
# database_engine := the database engine to generate a complete backup for (e.g. postgres, maria)
# stage := the name of the stage (e.g. dev, int, qa, prod)
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
@@ -33,8 +24,15 @@
connection: local
gather_facts: false
tasks:
# add virtual server to load stage specific variables as context
pre_tasks:
- name: "Check if ansible version is at least 2.10.x"
assert:
that:
- ansible_version.major >= 2
- ansible_version.minor >= 10
msg: "The ansible version has to be at least 2.10 (found {{ ansible_version.full }})"
# add virtual server to load stage specific variables as context
- name: "Add <{{ stage }}-virtual-host-to-read-groups-vars> to hosts"
add_host:
name: "{{ stage }}-virtual-host-to-read-groups-vars"
@@ -42,44 +40,21 @@
- "stage_{{ stage }}"
changed_when: False
- hosts: "{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
connection: local
pre_tasks:
- name: "Import constraints check"
import_tasks: tasks/constraints_check.yml
become: false
tags:
- always
- name: "Import autodiscover pre-tasks"
import_tasks: tasks/autodiscover_pre_tasks.yml
become: false
tags:
- always
tasks:
- name: "Add {{ database_engine }} servers to hosts if necessary"
add_host:
name: "{{ item.name }}"
name: "{{ stage }}-{{ database_engine }}-01"
groups:
- "stage_{{ stage }}"
- "{{ database_engine }}"
when:
- (database_engine == 'postgres' and item.service == 'postgres' and (item.role | default('')) == 'slave')
or (database_engine == 'maria' and item.service == 'maria')
loop: "{{ stage_server_infos }}"
- name: "Add 'backup' servers to hosts if necessary"
- '{{ database_engine }}'
changed_when: False
- name: "Add 'storage' servers to hosts if necessary"
add_host:
name: "{{ stage }}-backup-01"
name: "{{ stage }}-fgrz-01"
groups:
- "stage_{{ stage }}"
- "backup"
when:
- "'postgres' in groups or 'maria' in groups"
- storage
changed_when: False
##############################################################
## Creating remote database backups for created inventory
@@ -87,47 +62,39 @@
- hosts: "postgres:maria"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
vars:
ansible_ssh_host: "{{ stage_server_domain }}"
current_date_time: "{{ get_current_date_time }}"
tasks:
- name: "Trigger backup mechanism"
include_role:
name: "{{ database_engine }}"
name: '{{ database_engine }}'
tasks_from: _create_backup
#############################################################
# Syncing remote database backups to backup server
# Syncing remote database backups to storage server
#############################################################
- hosts: "postgres:maria:backup"
- hosts: "postgres:maria:storage"
serial: "{{ serial_number | default(5) }}"
gather_facts: false
vars:
ansible_ssh_host: "{{ stage_server_domain }}"
backup_server_system_user: "backuphamster"
storageserver_system_user: 'backuphamster'
tasks:
# I could not get it up and running with <synchronize> module
# to sync data from remote server A to remote server B
- name: "Syncing remote backups"
become: yes
become_user: "{{ backup_server_system_user }}"
shell: "/home/{{ backup_server_system_user }}/pull_remote_backups.sh {{ item }} {{ stage }} {{ database_engine }}"
with_items: "{{ (groups['postgres'] | default([])) + (groups['maria'] | default([])) }}"
become_user: '{{ storageserver_system_user }}'
vars:
database_server_ip: "{{ stage }}-{{ database_engine }}-01.{{ domain }}"
shell: '/home/{{ storageserver_system_user }}/pull_remote_backups.sh {{ database_server_ip }} {{ stage }} {{ database_engine }}'
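# e.g. for stage=dev and database_engine=postgres this runs, as backuphamster:
#   /home/backuphamster/pull_remote_backups.sh dev-postgres-01.<domain> dev postgres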
when:
- inventory_hostname in groups['backup']
- inventory_hostname in groups['storage']
- name: "Cleanup remote backup dirs: {{ database_engine }}"
become: yes
file:
path: "{{ backup_directory }}/{{ database_engine }}/{{ get_current_date }}"
path: '{{ backup_directory }}/{{ database_engine }}/{{ ansible_date_time.date }}'
state: absent
when:
- not inventory_hostname in groups['backup']
- inventory_hostname in groups[database_engine]
- not inventory_hostname in groups['storage']
#############################################################
# Sending smardigo management message to process
@ -142,5 +109,5 @@
connect_jwt_username: "{{ management_admin_username }}"
tasks:
- name: "Sending smardigo management message to <{{ shared_service_url_management }}>"
- name: "Sending smardigo management message to <{{ smardigo_management_url }}>"
include_tasks: tasks/smardigo_management_message.yml

@ -1,14 +1,12 @@
---
# Parameters:
# playbook inventory
# stage := the name of the stage (e.g. devnso, qanso, prodnso)
# tenant := object with tenant related data
# key :=
# name :=
# cluster := object with cluster specific data (optional)
# ...
# data := object with action specific data (optional)
# ...
# stage := the name of the stage (e.g. dev, int, qa, prod)
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department)
# cluster_size := (WIP node count for the cluster)
# cluster_service := (service to set up, e.g. 'connect', ...)
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
@ -18,16 +16,43 @@
# Creating inventory dynamically for given parameters
#############################################################
- import_playbook: pmci-inventory-cluster.yml
- hosts: localhost
gather_facts: false
connection: local
pre_tasks:
- name: "Check if ansible version is at least 2.10.x"
assert:
that:
- ansible_version.major >= 2
- ansible_version.minor >= 10
msg: "The ansible version has to be at least ({{ ansible_version.full }})"
# add virtual server to load stage specific variables as context
- name: "Add <{{ stage }}-virtual-host-to-read-groups-vars> to hosts"
add_host:
name: "{{ stage }}-virtual-host-to-read-groups-vars"
groups:
- "stage_{{ stage }}"
changed_when: False
tasks:
- name: Add hosts
add_host:
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ '%02d' | format(item|int) }}"
groups:
- "stage_{{ stage }}"
- "{{ cluster_service }}"
with_sequence: start=1 end={{ cluster_size | default(1) }}
changed_when: False
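# Example: stage=dev, tenant_id=acme, cluster_name=shop, cluster_size=2
# yields hosts dev-acme-shop-01 and dev-acme-shop-02 (illustrative values).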
#############################################################
# Creating servers for created inventory
#############################################################
- hosts: "pmci_server_selector"
- hosts: "stage_{{ stage }}:!{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(5) }}"
gather_facts: false
remote_user: root
pre_tasks:
- name: Get all Firewalls from Hetzner
@ -63,20 +88,17 @@
- update_networks
roles:
- role: hetzner-ansible-hcloud
- role: hetzner-ansible-dns
vars:
record_data: "{{ stage_server_ip }}"
record_name: "{{ inventory_hostname }}"
- role: hcloud
#############################################################
# Provisioning servers for created inventory
#############################################################
- hosts: "pmci_server_selector"
- hosts: "stage_{{ stage }}:!{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
remote_user: root
vars:
ansible_ssh_host: "{{ stage_server_domain }}"
pre_tasks:
- name: Remove outdated dependencies
@ -91,7 +113,6 @@
'docker-logrotate',
'docker-engine',
'smartmontools',
'mc',
]
state: 'absent'
when: ansible_distribution == "Ubuntu"
@ -102,34 +123,34 @@
tags:
- always
- name: "Setting default variables pre-tasks"
import_tasks: tasks/pmci_set_default_variables.yml
become: false
tags:
- always
roles:
- role: ansible-role-docker
when:
- docker_enabled
- role: hetzner-ansible-common
- role: devsec.hardening.ssh_hardening
tags:
- ssh_hardening
- role: common
- role: hetzner-ansible-filebeat
- role: filebeat
when: filebeat_enabled | default(True)
- role: hetzner-ansible-node-exporter
- role: node_exporter
when: node_exporter_enabled | default(True)
- role: hetzner-ansible-traefik
- role: traefik
when: traefik_enabled | default(True)
#############################################################
# Sending smardigo management message to process
#############################################################
- import_playbook: pmci-callback.yml
- hosts: "{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
connection: local
run_once: true
vars:
connect_jwt_username: "{{ management_admin_username }}"
tasks:
- name: "Sending smardigo management message to <{{ smardigo_management_url }}>"
include_tasks: tasks/smardigo_management_message.yml

@ -0,0 +1,86 @@
---
# Parameters:
# playbook inventory
# stage := the name of the stage (e.g. dev, int, qa, prod)
# tenant_id := (unique key for the tenant, e.g. customer)
# cluster_name := (business name for the cluster, e.g. product, department)
# cluster_size := (WIP node count for the cluster)
# cluster_service := (service to set up, e.g. 'connect', ...)
# cluster_features := (optional features to use, e.g. ['wordpress', 'resubmission', ...])
# smardigo message callback
# scope_id := (scope id of the management process)
# process_instance_id := (process instance id of the management process)
# smardigo_management_action := (smardigo management action name of the management process)
#############################################################
# Creating inventory dynamically for given parameters
#############################################################
- hosts: localhost
gather_facts: false
connection: local
pre_tasks:
- name: "Check if ansible version is at least 2.10.x"
assert:
that:
- ansible_version.major >= 2
- ansible_version.minor >= 10
msg: "The ansible version has to be at least ({{ ansible_version.full }})"
# add virtual server to load stage specific variables as context
- name: "Add <{{ stage }}-virtual-host-to-read-groups-vars> to hosts"
add_host:
name: "{{ stage }}-virtual-host-to-read-groups-vars"
groups:
- "stage_{{ stage }}"
changed_when: False
tasks:
- name: Add hosts
add_host:
name: "{{ stage }}-{{ tenant_id }}-{{ cluster_name }}-{{ '%02d' | format(item|int) }}"
groups: "{{ ['stage_' + stage ] + [cluster_service] + cluster_features }}"
with_sequence: start=1 end={{ cluster_size | default(1) }}
changed_when: False
#############################################################
# Creating services for created inventory
#############################################################
- hosts: "stage_{{ stage }}:!{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
remote_user: root
vars:
ansible_ssh_host: "{{ stage_server_domain }}"
pre_tasks:
- name: "Import autodiscover pre-tasks"
import_tasks: tasks/autodiscover_pre_tasks.yml
become: false
tags:
- always
roles:
- role: connect
when: "'connect' in group_names"
- role: connect_wordpress
when: "'connect_wordpress' in group_names"
#############################################################
# Sending smardigo management message to process
#############################################################
- hosts: "{{ stage }}-virtual-host-to-read-groups-vars"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
connection: local
run_once: true
vars:
connect_jwt_username: "{{ management_admin_username }}"
tasks:
- name: "Sending smardigo management message to <{{ smardigo_management_url }}>"
include_tasks: tasks/smardigo_management_message.yml

@ -0,0 +1,120 @@
# relay config sections
skopeo:
# path to the skopeo binary; defaults to 'skopeo', in which case it needs to
# be in PATH
binary: skopeo
# directory under which to look for client certs & keys, as well as CA certs
# (see note below)
certs-dir: /etc/skopeo/certs.d
docker:
# Docker host to use as the relay
dockerhost: unix:///var/run/docker.sock
# Docker API version to use, defaults to 1.24
api-version: 1.24
# settings for image matching (see below)
lister:
# maximum number of repositories to list, set to -1 for no limit, defaults to 100
maxItems: 100
# for how long a repository list will be re-used before retrieving again;
# specify as a Go duration value ('s', 'm', or 'h'), set to -1 for not caching,
# defaults to 1h
cacheDuration: 1h
# list of sync tasks
tasks:
- name: smardigo # required
# interval in seconds at which the task should be run; when omitted,
# the task is only run once at start-up
interval: 600
# determines whether for this task, more verbose output should be
# produced; defaults to false when omitted
verbose: true
# 'source' and 'target' are both required and describe the source and
# target registries for this task:
# - 'registry' points to the server; required
# - 'auth' contains the base64 encoded credentials for the registry
# in JSON form {"username": "...", "password": "..."}
# - 'auth-refresh' specifies an interval for automatic retrieval of
# credentials; only for AWS ECR (see below)
# - 'skip-tls-verify' determines whether to skip TLS verification for the
# registry server (only for 'skopeo', see note below); defaults to false
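# An auth value can be produced with a one-liner such as
#   echo -n '{"username": "<user>", "password": "<pass>"}' | base64 -w0
# (GNU base64; -w0 disables line wrapping).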
source:
registry: docker.dev-at.de
auth: eyJ1c2VybmFtZSI6ImFkbWluIiwicGFzc3dvcmQiOiJRNHB6aWhWRFl3eUthZEM3NmxiNCJ9Cg==
target:
registry: dev-harbor-01.smardigo.digital
auth: eyJ1c2VybmFtZSI6InJvYm90JGFuc2libGUiLCJwYXNzd29yZCI6IlAwRmJkb2tSc3V0V2lvVWl2cmI5TzVET05HY2FHNk1KIn0K
# 'mappings' is a list of 'from':'to' pairs that define mappings of image
# paths in the source registry to paths in the destination; 'from' is
# required, while 'to' can be dropped if the path should remain the same as
# 'from'. Regular expressions are supported in both fields (read on below
# for more details). Additionally, the tags being synced for a mapping can
# be limited by providing a 'tags' list. This list may contain semver and
# regular expression filters (see below). When omitted, all image tags are
# synced.
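# Note on the regex filters below: in '^(latest)|(...)$' the anchors bind to
# the individual alternatives, so '^latest' or '...$' each match on their own;
# '^((latest)|(...))$' would be the fully anchored form.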
mappings:
- from: smardigo/connect-whitelabel-app
to: smardigo/connect-whitelabel-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- from: smardigo/iam-app
to: smardigo/iam-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- from: smardigo/smardigo-webdav-app
to: smardigo/smardigo-webdav-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- from: smardigo/smardigo-workflow-proxy-app
to: smardigo/smardigo-workflow-proxy-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- name: sensw
interval: 600
verbose: true
source:
registry: docker.dev-at.de
auth: eyJ1c2VybmFtZSI6ImFkbWluIiwicGFzc3dvcmQiOiJRNHB6aWhWRFl3eUthZEM3NmxiNCJ9Cg==
target:
registry: dev-harbor-01.smardigo.digital
auth: eyJ1c2VybmFtZSI6InJvYm90JGFuc2libGUiLCJwYXNzd29yZCI6IlAwRmJkb2tSc3V0V2lvVWl2cmI5TzVET05HY2FHNk1KIn0K
mappings:
- from: smardigo/sensw-app
to: sensw/sensw-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- from: smardigo/sensw-bda-adapter-app
to: sensw/sensw-bda-adapter-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- from: smardigo/sensw-profiskal-export-app
to: sensw/sensw-profiskal-export-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- name: ssp
interval: 600
verbose: true
source:
registry: docker.dev-at.de
auth: eyJ1c2VybmFtZSI6ImFkbWluIiwicGFzc3dvcmQiOiJRNHB6aWhWRFl3eUthZEM3NmxiNCJ9Cg==
target:
registry: dev-harbor-01.smardigo.digital
auth: eyJ1c2VybmFtZSI6InJvYm90JGFuc2libGUiLCJwYXNzd29yZCI6IlAwRmJkb2tSc3V0V2lvVWl2cmI5TzVET05HY2FHNk1KIn0K
mappings:
- from: smardigo/ssp-connect-app
to: ssp/ssp-connect-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'
- from: smardigo/smardigo-action-si-dyns-app
to: ssp/smardigo-action-si-dyns-app
tags:
- 'regex: ^(latest)|(([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+)\.([0-9]|[1-9][0-9]+))$'

@ -0,0 +1,11 @@
version: '3.7'
services:
local-dregsy:
image: "xelalex/dregsy:0.4.1"
volumes:
- "./config.yaml:/config.yaml:ro"
- "/var/run/docker.sock:/var/run/docker.sock:ro"
environment:
LOG_LEVEL: "debug"
LOG_FORMAT: "json"

@ -1,9 +1,3 @@
#!/bin/bash
if [ "x$1" == "x" ];then
echo "Stage as param \$1 is missing. exit"
exit 1
fi
docker run -v `pwd`/templates/elastic-certs:/certs -v `pwd`/templates/elastic-certs/$1-instances.yaml:/usr/share/elasticsearch/config/certificates/$1-instances.yml docker.elastic.co/elasticsearch/elasticsearch:7.16.3 /bin/sh "/certs/certutil.sh" $1
docker run -v `pwd`/templates/elastic-certs:/certs -v `pwd`/templates/elastic-certs/$1-instances.yaml:/usr/share/elasticsearch/config/certificates/$1-instances.yml docker.elastic.co/elasticsearch/elasticsearch:7.12.0 /bin/sh "/certs/certutil.sh" $1

@ -17,11 +17,14 @@
default: 'no'
pre_tasks:
- name: "Import constraints check"
import_tasks: tasks/constraints_check.yml
- name: "Check if ansible version is at least 2.10.x"
assert:
that:
- ansible_version.major >= 2
- ansible_version.minor >= 10
msg: "The ansible version has to be at least ({{ ansible_version.full }})"
delegate_to: 127.0.0.1
become: false
tags:
- always
- name: "Import autodiscover pre-tasks"
import_tasks: tasks/autodiscover_pre_tasks.yml
@ -33,13 +36,13 @@
- block:
- name: "Delete server <{{ inventory_hostname }}>"
include_role:
name: hetzner-ansible-hcloud
name: hcloud
tasks_from: _set_server_state
vars:
- server_state: "absent"
- name: "Delete DNS entry <{{ inventory_hostname }}> for <{{ domain }}>"
include_role:
name: hetzner-ansible-dns
name: sma_digitalocean
tasks_from: _remove_dns
vars:
record_to_remove: '{{ inventory_hostname }}'

@ -0,0 +1,60 @@
---
- name: 'apply setup to {{ host | default("all") }}'
hosts: '{{ host | default("all") }}'
serial: "{{ serial_number | default(5) }}"
tasks:
- set_fact:
prometheus_endpoints_all_stages:
- "{{ lookup('community.general.dig', 'dev-prometheus-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'qa-prometheus-01.' + domain ) }}"
- "{{ lookup('community.general.dig', 'prodnso-prometheus-01.' + domain ) }}"
- name: "Allow SSH in UFW"
ufw:
rule: limit
port: 22
proto: tcp
- name: "Allow port 9100 for node-exporter in UFW"
ufw:
rule: allow
port: 9100
proto: tcp
src: "{{ item }}"
loop: "{{ promethues_endpoints_all_stages }}"
- name: "Allow port 9115 for blackbox-exporter in UFW"
ufw:
rule: allow
port: 9115
proto: tcp
src: "{{ item }}"
loop: "{{ promethues_endpoints_all_stages + ip_whitelist_admins}}"
- name: "Set firewall default policy"
ufw:
state: enabled
policy: reject
- name: "configure ssh_hardening"
include_role:
# include role from collection called 'devsec'
name: devsec.hardening.ssh_hardening
apply:
tags:
- ssh_hardening
tags:
- ssh_hardening
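# Note: tags set on include_role only gate the include task itself; the
# 'apply' keyword above is what propagates the tag to the tasks inside the
# role, so '--tags ssh_hardening' reaches them.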
- name: "Install node-exporter via include_role"
include_role:
name: cloudalchemy.node-exporter
- name: "Install blackbox-exporter via include_role"
include_role:
name: cloudalchemy.blackbox-exporter
apply:
tags:
- blackbox
tags:
- blackbox

@ -1,65 +1,37 @@
---
roles:
- name: geerlingguy.docker
version: 6.0.3
- name: cloudalchemy.blackbox-exporter
- name: geerlingguy.docker
version: 4.1.1
- name: geerlingguy.kubernetes
version: 7.1.0
- name: geerlingguy.redis
version: 1.7.0
- name: idealista.prometheus_redis_exporter_role
version: 2.1.0
- name: cloudalchemy.node-exporter
version: 2.0.0
scm: git
src: https://github.com/cloudalchemy/ansible-node-exporter
- name: cloudalchemy.blackbox-exporter
version: 1.0.0
scm: git
src: https://github.com/cloudalchemy/ansible-blackbox-exporter
- name: postfix
version: v3.6.2
scm: git
- name: postfix
version: v3.6.1
src: https://github.com/Oefenweb/ansible-postfix.git
- name: hetzner-ansible-dns
version: 0.0.5
scm: git
src: git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-dns-role.git
- name: hetzner-ansible-hcloud
version: 0.0.4
scm: git
src: git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-hcloud-role.git
- name: hetzner-ansible-common
version: 0.0.6
scm: git
src: git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-common-role.git
- name: hetzner-ansible-filebeat
version: 0.0.7
scm: git
src: git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-filebeat-role.git
- name: hetzner-ansible-metricbeat
version: 0.0.5
scm: git
src: git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-metricbeat-role.git
- name: hetzner-ansible-node-exporter
version: 0.0.4
scm: git
src: git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-node-exporter-role.git
- name: hetzner-ansible-traefik
version: 0.0.5
scm: git
src: git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-traefik-role.git
- name: hetzner-ansible-sma-deploy
version: 0.0.4
scm: git
src: git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-sma-deploy-role.git
collections:
- name: hetzner.hcloud
version: 1.8.2
- name: community.general
version: 7.0.1
- name: community.docker
version: 3.8.1
- name: kubernetes.core
version: 2.4.0
- name: community.mysql
version: 3.7.1
- name: community.postgresql
version: 2.4.1
- name: community.digitalocean
version: 1.23.0
- name: devsec.hardening
version: 8.7.0
- name: hetzner.hcloud
version: 1.6.0
- name: community.general
- name: community.docker
version: 2.1.1
- name: kubernetes.core
- name: community.mysql
- name: community.postgresql
- name: community.digitalocean
version: 1.11.0
- name: devsec.hardening
version: 7.12.0
src: https://github.com/dev-sec/ansible-collection-hardening
- name: community.dns
version: 2.5.4
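# Usage sketch (assuming this file is the project's requirements file):
#   ansible-galaxy role install -r requirements.yml
#   ansible-galaxy collection install -r requirements.yml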

@ -1,60 +0,0 @@
---
# Parameters:
# playbook inventory
# stage := the name of the stage (e.g. devnso, qanso, prodnso)
# environment variable
# GITLAB_API_TOKEN := Access token from gitlab
#############################################################
# Creating inventory dynamically for given parameters
#############################################################
- hosts: localhost
gather_facts: false
connection: local
tasks:
- name: Add hosts
add_host:
name: "{{ stage }}-gitlab"
groups: "{{ ['stage_' + stage ] }}"
#############################################################
# Creating gitlab mirrors for current stage
#############################################################
- hosts: "stage_{{ stage }}"
serial: "{{ serial_number | default(1) }}"
gather_facts: false
connection: local
vars:
projects:
- id: 1210
name: argocd
- id: 1216
name: operator-awx
- id: 1212
name: operator-jaeger
- id: 1231
name: operator-knative
- id: 1233
name: smardigo-awx
- id: 1232
name: smardigo-jaeger
pre_tasks:
- name: "Add repository remote mirror to project"
delegate_to: 127.0.0.1
become: false
uri:
url: "https://git.dev-at.de/api/v4/projects/{{ item.id }}/remote_mirrors"
method: POST
body_format: json
body:
enabled: true
only_protected_branches: true
url: "https://{{ gitea_admin_username }}:{{ gitea_admin_password }}@{{ shared_service_hostname_gitea }}/argocd/{{ item.name }}.git"
headers:
PRIVATE-TOKEN: "{{ lookup('env', 'GITLAB_API_TOKEN') }}"
status_code: [201]
loop: "{{ projects }}"

@ -1,6 +0,0 @@
#!/bin/bash
git clone git@git.dev-at.de:smardigo-hetzner/k8s-clusters/devnso-argocd.git ../devnso-argocd
git clone git@git.dev-at.de:smardigo-hetzner/k8s-clusters/devssp-argocd.git ../devssp-argocd
git clone git@git.dev-at.de:smardigo-hetzner/k8s-clusters/prodnso-argocd.git ../prodnso-argocd
git clone git@git.dev-at.de:smardigo-hetzner/k8s-clusters/demompmx-argocd.git ../demompmx-argocd

@ -1,10 +0,0 @@
#!/bin/bash
git clone git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-common-role.git ../hetzner-ansible-common-role
git clone git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-dns-role.git ../hetzner-ansible-dns-role
git clone git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-filebeat-role.git ../hetzner-ansible-filebeat-role
git clone git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-hcloud-role.git ../hetzner-ansible-hcloud-role
git clone git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-metricbeat-role.git ../hetzner-ansible-metricbeat-role
git clone git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-node-exporter-role.git ../hetzner-ansible-node-exporter-role
git clone git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-sma-deploy-role.git ../hetzner-ansible-sma-deploy-role
git clone git@git.dev-at.de:smardigo-hetzner/ansible/hetzner-ansible-roles/hetzner-ansible-traefik-role.git ../hetzner-ansible-traefik-role

@ -1,180 +0,0 @@
---
argocd_oidc_realm: "stage-argocd"
argocd_oidc_client_id: "stage-argocd"
argocd_oidc_client_secret: "{{ argocd_oidc_client_secret_vault | default(argo_keycloak_client_secret_vault) }}" # backwards compatibility
argocd_oidc_admin_username: "argocd-admin"
argocd_oidc_admin_password: "{{ argocd_oidc_admin_password_vault | default(argocd_admin_password_vault) }}" # backwards compatibility
argocd_oidc_admin_email: "{{ devops_email_address }}"
argocd_server_admin_password: "{{ argocd_server_admin_password_vault }}"
k8s_argocd_helm__name: "argo-cd"
k8s_argocd_helm__release_namespace: "argo-cd"
k8s_argocd_helm__chart_version: 5.19.0
# https://github.com/argoproj/argo-helm/tree/master/charts/argo-cd
k8s_argocd_helm__release_values:
repoServer:
serviceAccount:
create: true
name: argo-cd-argocd-repo-server
rbac:
- apiGroups:
- ""
resources:
- secrets
verbs:
- get
logLevel: warn
logFormat: json
env:
- name: ARGOCD_MAX_CONCURRENT_LOGIN_REQUESTS_COUNT
value: "0"
- name: ARGOCD_EXEC_TIMEOUT
value: "300s"
- name: XDG_CONFIG_HOME
value: /.config
- name: GNUPGHOME
value: /home/argocd/.gnupg
- name: HELM_PLUGINS
value: /custom-tools/helm-plugins/
- name: HELM_SECRETS_SOPS_PATH
value: /custom-tools/sops
- name: HELM_SECRETS_VALS_PATH
value: /custom-tools/vals
- name: HELM_SECRETS_KUBECTL_PATH
value: /custom-tools/kubectl
- name: HELM_SECRETS_CURL_PATH
value: /custom-tools/curl
# https://github.com/jkroepke/helm-secrets/wiki/Security-in-shared-environments
- name: HELM_SECRETS_KEY_LOCATION_PREFIX
value: "/sops-gpg/"
- name: HELM_SECRETS_VALUES_ALLOW_SYMLINKS
value: "false"
- name: HELM_SECRETS_VALUES_ALLOW_ABSOLUTE_PATH
value: "false"
- name: HELM_SECRETS_VALUES_ALLOW_PATH_TRAVERSAL
value: "false"
volumes:
- name: custom-tools
emptyDir: {}
- name: custom-tools-helm
emptyDir: {}
- name: gnupg-home
emptyDir: {}
- name: sops-gpg
secret:
secretName: sops-gpg
volumeMounts:
- mountPath: /home/argocd/.gnupg
name: gnupg-home
subPath: .gnupg
- mountPath: /usr/local/bin/kustomize
name: custom-tools
subPath: kustomize
# Verify this matches a XDG_CONFIG_HOME=/.config env variable
- mountPath: /.config/kustomize/plugin/viaduct.ai/v1/ksops/ksops
name: custom-tools
subPath: ksops
- mountPath: /custom-tools/helm-plugins
name: custom-tools-helm
subPath: helm-plugins
- mountPath: /custom-tools/kubectl
name: custom-tools-helm
subPath: kubectl
- mountPath: /custom-tools/sops
name: custom-tools-helm
subPath: sops
- mountPath: /custom-tools/vals
name: custom-tools-helm
subPath: vals
initContainers:
- name: 1-install-ksops
image: viaductoss/ksops:v3.0.1
command: ["/bin/sh", "-c"]
args:
- echo "Installing KSOPS...";
mv ksops /custom-tools/;
mv $GOPATH/bin/kustomize /custom-tools/;
echo "Done.";
volumeMounts:
- mountPath: /custom-tools
name: custom-tools
- name: 2-download-tools
image: alpine:latest
command: [sh, -ec]
env:
- name: HELM_SECRETS_VERSION
value: "3.12.0"
- name: KUBECTL_VERSION
value: "1.24.3"
- name: VALS_VERSION
value: "0.18.0"
- name: SOPS_VERSION
value: "3.7.3"
args:
- |
echo "Installing helm secrets...";
mkdir -p /custom-tools/helm-plugins
wget -qO- https://github.com/jkroepke/helm-secrets/releases/download/v${HELM_SECRETS_VERSION}/helm-secrets.tar.gz | tar -C /custom-tools/helm-plugins -xzf-;
echo "Done.";
echo "Downloading SOPS=${SOPS_VERSION} and kubectl ...";
wget -qO /custom-tools/sops https://github.com/mozilla/sops/releases/download/v${SOPS_VERSION}/sops-v${SOPS_VERSION}.linux
wget -qO /custom-tools/kubectl https://dl.k8s.io/release/v${KUBECTL_VERSION}/bin/linux/amd64/kubectl
echo "Done.";
echo "Downloading vals...";
wget -qO- https://github.com/variantdev/vals/releases/download/v${VALS_VERSION}/vals_${VALS_VERSION}_linux_amd64.tar.gz | tar -xzf- -C /custom-tools/ vals;
echo "Done.";
chmod +x /custom-tools/*;
volumeMounts:
- mountPath: /custom-tools
name: custom-tools-helm
- name: 3-import-gpg-key
image: argoproj/argocd:v2.2.5
command: ["gpg", "--import","/sops-gpg/gpg_key_smardigo_automation__private"]
env:
- name: GNUPGHOME
value: /gnupg-home/.gnupg
volumeMounts:
- mountPath: /sops-gpg
name: sops-gpg
- mountPath: /gnupg-home
name: gnupg-home
server:
logLevel: warn
logFormat: json
config:
kustomize.buildOptions: "--enable-alpha-plugins"
helm.valuesFileSchemes: >-
secrets+gpg-import, secrets+gpg-import-kubernetes,
secrets+age-import, secrets+age-import-kubernetes,
secrets,secrets+literal,
https
service:
sessionAffinity: ClientIP
ingress:
enabled: true
annotations:
cert-manager.io/cluster-issuer: letsencrypt-prod
cert-manager.io/issue-temporary-certificate: "true"
kubernetes.io/ingress.class: nginx
nginx.ingress.kubernetes.io/whitelist-source-range: "{{ ( ip_whitelist ) | join(',') }}"
nginx.ingress.kubernetes.io/force-ssl-redirect: "false"
nginx.ingress.kubernetes.io/ssl-passthrough: "true"
nginx.ingress.kubernetes.io/backend-protocol: "HTTPS"
hosts:
- "{{ shared_service_kube_hostname_argocd }}"
tls:
- secretName: "{{ stage }}-kube-argocd-cert"
hosts:
- "{{ shared_service_kube_hostname_argocd }}"
dex:
enabled: false
applicationSet:
enabled: false
configs:
secret:
argocdServerAdminPassword: '{{ argocd_server_admin_password | password_hash("bcrypt") }}'

@ -1,16 +0,0 @@
---
awx_oidc_realm: "stage-awx"
awx_oidc_client_id: "stage-awx"
awx_oidc_client_secret: "{{ awx_oidc_client_secret_vault }}"
awx_oidc_admin_username: "{{ awx_admin_username }}"
awx_oidc_admin_password: "{{ awx_admin_password }}"
awx_oidc_admin_email: "{{ devops_email_address }}"
awx_custom_ee_image: "/awx/awx-custom-ee"
awx_ansible_user_name: "awx"
awx_ansible_user_ssh_key_private: "{{ ansible_ssh_key_private_vault }}"
awx_credential_machine_hetzner_name: hetzner-ansible-ssh
awx_ansible_username: ansible
awx_ansible_password: ansible

@ -1,4 +0,0 @@
---
backup_communication_keys_repository: "https://{{ gitea_admin_username | urlencode() }}:{{ gitea_admin_password | urlencode() }}@{{ shared_service_hostname_gitea }}/gitea-admin/communication-keys.git"
backup_communication_keys_stage_gpg_key: "smardigo_automation_{{ stage }}.gpg.pub"
backup_gpg_recipient: "smardigo automation {{ stage | upper }}"

@ -1,31 +0,0 @@
---
#blackbox_exporter_cli_flags:
# log.level: "debug"
blackbox_exporter_version: "{{ prom_blackbox_exporter_version }}"
blackbox_exporter_configuration_modules:
http_2xx:
http:
method: GET
valid_status_codes: []
prober: http
timeout: 5s
http_3xx:
http:
method: GET
valid_status_codes:
- 301
- 302
prober: http
timeout: 5s
http_4xx:
http:
method: GET
valid_status_codes:
- 401
- 403
prober: http
timeout: 5s

@ -1,42 +0,0 @@
---
# If Elastic is served from k8s we need to reach it via the load balancer on port tcp/443
shared_service_connect_data_hostname: "{{ stage }}-connect-data.{{ domain_env }}:443"
shared_service_connect_data_username: "{{ elastic_connect_data_username_vault | default(elastic_admin_username) }}"
shared_service_connect_data_password: "{{ elastic_connect_data_password_vault | default(elastic_admin_password) }}"
connect_id: "{{ inventory_hostname }}-connect"
connect_base_url: "{{ connect_id }}.{{ domain }}"
process_search_id: "{{ inventory_hostname }}-process-search"
wordpress_id: "{{ inventory_hostname }}-wordpress"
wordpress_base_url: "{{ wordpress_id }}.{{ domain }}"
connect_workflow_env:
- "stage:{{ stage }}"
- "protocol:{{ http_s }}"
- "hostname:{{ connect_base_url }}"
- "managementHostname:{{ shared_service_host_management }}"
- "kibanaHostname:{{ shared_service_hostname_kibana }}"
- "keycloakHostname:{{ shared_service_hostname_keycloak }}"
- "smardigoUserToken:{{ smardigo_auth_token_value | default('-') }}"
smardigo_auth_token_name: "Smardigo-User-Token"
smardigo_default_theme: "/themes/netgo.json"
# digital ocean dns service (-> dns-challenge)
connect_customer_urls_digitalocean: []
# hetzner dns service (-> dns-challenge)
connect_customer_urls_hetzner: []
# dns is managed by external provider (-> http-challenge)
connect_customer_urls_extern: []
# configure reverse proxy for each url
# keycloak redirect/origins for each url
connect_customer_urls: "{{
connect_customer_urls_digitalocean
+ connect_customer_urls_hetzner
+ connect_customer_urls_extern }}"
# allow customer specific access from ips in cidr notation (e.g. 1.2.3.4/32)
# use 0.0.0.0/0 for public access
connect_customer_networks: []

@ -1,21 +0,0 @@
---
shared_service_maria_primary: "{{ stage }}-maria-01"
shared_service_postgres_primary: "{{ stage }}-postgres01-01"
shared_service_postgres_secondary: "{{ stage }}-postgres01-02"
shared_service_pg_master_ip: "{{ stage_server_infos
| selectattr('name', '==', shared_service_postgres_primary )
| map(attribute='private_ip')
| list
| first
| default('-') }}"
shared_service_pg_slave_ip: "{{ stage_server_infos
| selectattr('name', '==', shared_service_postgres_secondary )
| map(attribute='private_ip')
| list
| first
| default('-') }}"
postgres_replicator_user: "replicator"
postgres_replicator_user_password: "{{ postgres_replicator_user_password_vault }}"

@ -1,8 +0,0 @@
---
dns: digitalocean
domain: "smardigo.digital"
domain_env: "{{ domain }}"
traefik_letsencrypt_provider: "digitalocean"
hetzner_dns_api_key: '{{ hetzner_dns_api_key_vault }}'
digitalocean_authentication_token: '{{ digitalocean_authentication_token_vault }}'

@ -1,16 +0,0 @@
---
# We use a proxy because Elastic blocks certain IP addresses
filebeat_image_name: "{{ shared_service_hostname_harbor }}/docker.elastic.co/beats/filebeat"
metricbeat_image_name: "{{ shared_service_hostname_harbor }}/docker.elastic.co/beats/metricbeat"
elastic_cluster_settings_max_shards: 1000
ilm_configuration:
- name: default_housekeeping
strategy: delete_after
retention: 60d
priority: 200
template: "default_housekeeping"
patterns:
- "{{ stage }}-*"
- "uncategorized-*"

@ -1,290 +0,0 @@
---
hcloud_firewall_objects:
-
name: "{{ stage }}-default"
state: present
rules:
-
direction: in
protocol: icmp
port: ''
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: "ICMP allowed"
-
direction: in
protocol: tcp
port: '22'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: "Allow access to SSH for whitelisted ips"
-
direction: in
protocol: tcp
port: '80'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: "Allow access to HTTP for whitelisted ips"
-
direction: in
protocol: tcp
port: '443'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: "Allow access to HTTPS for whitelisted ips"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }}'
-
name: "{{ stage }}-monitoring"
state: present
rules:
-
direction: in
protocol: tcp
port: '9080-9085'
source_ips: '{{ ip_whitelist + [ lookup("community.general.dig", stage + "-prometheus-01." + domain ) + "/32"] }}'
destination_ips: []
description: "Allow access to Server/Service Monitoring for whitelisted ips"
-
direction: in
protocol: tcp
port: '9001'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: "Allow access to PgAdmin Monitoring for whitelisted ips"
-
direction: in
protocol: tcp
port: '9187'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: "Allow access to Postgres-Exporter Monitoring for whitelisted ips"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }}'
-
name: "{{ stage }}-monitoring-extern-https"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips:
- "{{ lookup('community.general.dig', '{{ shared_service_hostname_blackbox_exporter }}' ) }}/32"
destination_ips: []
description: "Allow access to Blackbox Monitoring for whitelisted ips"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=connect'
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=keycloak'
-
name: "{{ stage }}-access-to-kubernetes-api"
state: present
rules:
-
direction: in
protocol: tcp
port: '6443'
source_ips: "{{ ip_whitelist }}"
destination_ips: []
description: "Allow access to K8-API for whitelisted ips"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=kube_control_plane'
hcloud_firewall_objects_awx:
-
name: "{{ stage }}-awx-ssh-access-for-k8s-nodes"
state: present
rules:
-
direction: in
protocol: tcp
port: '22'
source_ips: "{{ k8s_worker_node_ips }}"
destination_ips: []
description: "Allow access to SSH from kubernetes worker nodes"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service'
hcloud_firewall_objects_vpn:
-
name: "{{ stage }}-vpn-access"
state: present
rules:
-
direction: in
protocol: udp
port: "{{ service_port_wireguard }}"
source_ips:
- "0.0.0.0/0"
destination_ips: []
description: "Allow access to VPN"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=vpn'
hcloud_firewall_objects_backup:
-
name: "{{ stage }}-backup-ssh-access"
state: present
rules:
-
direction: in
protocol: tcp
port: '22'
source_ips:
- "{{ offsite_storage_server_ip }}"
destination_ips: []
description: "Allow access to BACKUP from offsite"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=backup'
hcloud_firewall_objects_gitea:
-
name: "{{ stage }}-access-to-gitea"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ ip_whitelist }}"
destination_ips: []
description: "Allow access to GITEA for whitelisted ips"
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ [shared_service_network] + k8s_worker_node_ips }}"
destination_ips: []
description: "Allow access to GITEA for kubernetes worker nodes"
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ [shared_service_network] + (gitea_https_whitelisted_ips | default([])) }}"
destination_ips: []
description: "Allow access to GITEA for custom whitelisted ips"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=gitea'
hcloud_firewall_objects_keycloak:
-
name: "{{ stage }}-access-to-keycloak"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ ip_whitelist }}"
destination_ips: []
description: "Allow access to KEYCLOAK for custom whitelisted ips"
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ [shared_service_network] + k8s_worker_node_ips }}"
destination_ips: []
description: "Allow access to KEYCLOAK for kubernetes worker nodes"
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ [shared_service_network] + (keycloak_https_whitelisted_ips | default([])) }}"
destination_ips: []
description: "Allow access to KEYCLOAK for custom whitelisted ips"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=keycloak'
hcloud_firewall_objects_kibana:
-
name: "{{ stage }}-access-to-kibana"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ ip_whitelist }}"
destination_ips: []
description: "Allow access to KIBANA for whitelisted ips"
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ [shared_service_network] + k8s_worker_node_ips }}"
destination_ips: []
description: "Allow access to KIBANA for kubernetes worker nodes"
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ [shared_service_network] + (kibana_https_whitelisted_ips | default([])) }}"
destination_ips: []
description: "Allow access to KIBANA for custom whitelisted ips"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=kibana'
hcloud_firewall_objects_management:
-
name: "{{ stage }}-access-to-management"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ ip_whitelist }}"
destination_ips: []
description: "Allow access to MANAGEMENT for whitelisted ips"
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ [shared_service_network] + k8s_worker_node_ips }}"
destination_ips: []
description: "Allow access to MANAGEMENT for kubernetes worker nodes"
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ [shared_service_network] + (management_https_whitelisted_ips | default([])) }}"
destination_ips: []
description: "Allow access to MANAGEMENT for custom whitelisted ips"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=connect,tenant=management'

@ -1,7 +0,0 @@
---
gitea_oidc_realm: "stage-gitea"
gitea_oidc_client_id: "stage-gitea"
gitea_oidc_client_secret: "{{ gitea_oidc_client_secret_vault | default(gitea_client_secret) }}" # backwards compatibility
gitea_oidc_admin_username: "{{ gitea_admin_username }}"
gitea_oidc_admin_password: "{{ gitea_admin_password }}"
gitea_oidc_admin_email: "{{ devops_email_address }}"

@ -1,4 +0,0 @@
# smardigo automation {{ stage }} gpg key
# https://git.dev-at.de/smardigo-hetzner/communication-keys/
# push mirror: https://{{ stage }}-gitea-01.smardigo.digital/gitea-admin/communication-keys/
gpg_key_smardigo_automation__private: "{{ gpg_key_smardigo_automation__private__vault }}"

@ -1,18 +0,0 @@
---
# Define a list of unprivileged Grafana users that will be assigned the 'Viewer' role
# Sets the initial login password for all users; it must be changed by each user afterwards!
grafana_users:
- name: "smardigo"
login: "smardigo"
email: "{{ grafana_smardigo_email }}"
password: "{{ grafana_smardigo_password }}"
grafana_smardigo_password: "{{ grafana_smardigo_password_vault }}"
# Define Grafana dashboards which should be visible to users without the admin role
# See uids in hetzner-ansible/templates/prometheus/config/grafana/provisioning/dashboards/*.json
grafana_dashboard_whitelist:
- "hb7fSE0Zz" # Servers
- "spring_boot_21" # Spring Boot Statistics
- "000000039" # PostgreSQL Database
- "549c2bf8936f7767ea6ac47c47b00f2a" # MySQL

@ -1,10 +0,0 @@
---
harbor_oidc_realm: "stage-harbor"
harbor_oidc_client_id: "stage-harbor"
harbor_oidc_client_secret: "{{ harbor_oidc_client_secret_vault | default(docker_registry_oidc_client_secret_vault) }}" # backwards compatibility
harbor_oidc_admin_username: "harbor-admin"
harbor_oidc_admin_password: "{{ harbor_oidc_admin_password_vault }}"
harbor_oidc_admin_email: "{{ devops_email_address }}"
harbor_username: "{{ docker_registry_username_vault }}"
harbor_token: "{{ docker_registry_token_vault }}"

@ -1,14 +0,0 @@
---
keycloak_admin_username: "keycloak-admin"
keycloak_admin_password: "{{ keycloak_admin_password_vault }}"
keycloak_default_theme: "mpmx-theme"
# Additional IPs to consider as intranet IPs (e.g. inter-stage communication)
keycloak_ip_whitelist: []
# Use these Realm ACLs to create custom Traefik labels for Keycloak to restrict admin access per realm
# Both variables are mandatory!
# name: <realm_name>
# admin_ips: <ip/range in cidr notation>
keycloak_admin_realm_acls: []

@ -1,2 +0,0 @@
---
kube_image_repo: "prodnso-harbor-01.smardigo.digital/k8sgcrio-proxy"

@ -1,2 +0,0 @@
---
logstash_ssl_enabled: false

@ -1,39 +0,0 @@
---
management_admin_username: "management-admin" # backwards compatibility
management_admin_password: "{{ management_admin_password_vault }}" # backwards compatibility
management_oidc_realm: "stage-connect"
management_oidc_client_id: "connect"
management_oidc_client_secret: "{{ management_oidc_client_secret_vault }}"
management_oidc_admin_username: "management-admin"
management_oidc_admin_password: "{{ management_admin_password_vault }}"
management_oidc_admin_email: "{{ devops_email_address }}"
management_configurations:
- pmci
- backup
connect_connection_teams_url: "{{ netgo_msteams_hook_cd }}"
connect_connection_awx_url: "{{ shared_service_kube_url_awx }}/"
connect_connection_harbor_url: "{{ shared_service_url_harbor }}/v2/"
connect_connections:
- id: "teams"
name: "MS Teams"
url: "{{ connect_connection_teams_url }}"
connectionType: "HTTPS"
authType: "NO_AUTH"
- id: "awx"
name: "AWX"
url: "{{ connect_connection_awx_url }}"
connectionType: "HTTP"
authType: "BASIC_AUTH"
username: "{{ awx_admin_username }}"
password: "{{ awx_admin_password }}"
- id: "harbor"
name: "Harbor"
url: "{{ connect_connection_harbor_url }}"
connectionType: "HTTP"
authType: "BASIC_AUTH"
username: "{{ harbor_admin_username }}"
password: "{{ harbor_admin_password }}"

@ -1,8 +0,0 @@
---
pgadmin4_oidc_realm: "stage-pgadmin4"
pgadmin4_oidc_client_id: "stage-pgadmin4"
pgadmin4_oidc_dev_username: "pgadmin-dev"
pgadmin4_oidc_dev_password: "{{ pgadmin4_oidc_dev_password_vault }}"
pgadmin4_oidc_client_secret: "{{ pgadmin4_oidc_client_secret_vault }}"
pgadmin4_oidc_dev_email: "developer@netgo.de"
pgadmin4_oidc_admin_email: "{{ devops_email_address }}"

@ -1,6 +1,6 @@
---
ansible_ssh_host: "{{ stage_server_domain }}"
debug: false
ssh_macs:
- umac-128-etm@openssh.com
- hmac-sha2-256-etm@openssh.com
@ -23,9 +23,8 @@ ssh_ciphers:
- aes256-ctr
- aes128-gcm@openssh.com
- aes256-gcm@openssh.com
ssh_permit_root_login: "yes"
ssh_permit_root_login: 'yes'
debug: false
docker_enabled: true
docker_config_enabled: true
traefik_enabled: true
@ -34,124 +33,116 @@ metricbeat_enabled: false
node_exporter_enabled: true
common_apt_dependencies:
- jq
- mc
- vim
# TODO Check if we really want this
- zip
- curl
- htop
- iotop
- net-tools
- bash-completion
- python3-pip
common_pip_dependencies:
- passlib
- pyOpenSSL>=23.0
- docker==5.0.3
- docker-compose==1.29.2
- requests==2.28
- docker-compose
use_ssl: true
http_s: "http{{ use_ssl | ternary('s', '', omit) }}"
domain: "smardigo.digital"
stage_server_domain: "{{ inventory_hostname }}.{{ domain }}"
stage_server_url: "{{ http_s }}://{{ stage_server_domain }}"
stage_kube_load_balancer: "{{ stage_kube }}-ingress"
alertmanager_channel_smardigo: "#monitoring-{{ stage }}"
hetzner_server_type: cx11
hetzner_server_image: ubuntu-20.04
hetzner_location: nbg1
hetzner_load_balancer_type: lb11
gitlab_ansible_user_name: "gitlabci"
awx_ansible_user_name: "awx"
awx_ansible_user_ssh_key_private: "{{ ansible_ssh_key_private_vault }}"
awx_credential_machine_hetzner_name: hetzner-ansible-ssh
backupuser_user_name: backupuser
gitlab_ansible_user_name: "gitlabci"
# used for root access by Hetzner on server creation
# all SSH keys have to be available in Hetzner Cloud
# (@see cloud console / security / ssh-keys in the web ui)
default_hetzner_ssh_keys:
# used for root-access by hetzner on server creation (@see cloud console/security/ssh-keys)
hetzner_ssh_keys:
- "claus.paetow@netgo.de"
- "friedrich.goerz@netgo.de"
- "peter.heise@netgo.de"
- "sven.ketelsen@netgo.de"
- "michael.haehnel@netgo.de"
- "hoan.to@netgo.de"
- "{{ awx_ansible_user_name }}@netgo.de"
- "{{ gitlab_ansible_user_name }}@git.dev-at.de"
hetzner_ssh_keys: "{{
default_hetzner_ssh_keys
+ (custom_stage_hetzner_ssh_keys | default([]))
}}"
- "{{ gitlab_ansible_user_name }}@netgo.de"
hetzner_server_labels: "stage={{ stage }} service=none"
hetzner_server_labels: "stage={{ stage }}"
admin_user: "root"
sudo_groups:
[
{ id: "CentOS", sudo_group: "wheel" },
{ id: "RedHat", sudo_group: "wheel" },
{ id: "Ubuntu", sudo_group: "sudo" },
]
sudo_groups: [
{
id: "CentOS",
sudo_group: "wheel",
},
{
id: "RedHat",
sudo_group: "wheel",
},
{
id: "Ubuntu",
sudo_group: "sudo",
},
]
sudo_group: "{{ sudo_groups
| selectattr('id', 'match', '' + ansible_distribution + '')
| selectattr('id', 'match', '' + ansible_distribution + '' )
| map(attribute='sudo_group')
| list
| first
| replace('.', '-') }}"
| replace('.','-') }}"
# whitelist for outdated user detection - they won't be deleted at all
default_users:
- "nobody"
- "elastic"
- "postgres"
- "backuphamster"
- "administrator"
- "{{ admin_user }}"
default_platform_users:
- "claus.paetow"
- "sven.ketelsen"
- "michael.haehnel"
- "hoan.to"
- "{{ awx_ansible_user_name }}"
- "{{ gitlab_ansible_user_name }}"
smardigo_platform_users: "{{
default_platform_users
+ (custom_platform_users | default([]))
+ (custom_stage_platform_users | default([]))
}}"
ip_whitelist_netgo:
default_plattform_users:
- 'nobody'
- 'elastic'
- 'postgres'
- 'administrator'
- '{{ admin_user }}'
- '{{ backupuser_username }}'
smardigo_plattform_users:
- 'claus.paetow'
- 'friedrich.goerz'
- 'peter.heise'
- 'sven.ketelsen'
- '{{ awx_ansible_user_name }}'
- '{{ gitlab_ansible_user_name }}'
ip_whitelist_admins:
- "79.215.10.239/32" # sven
- "212.86.56.112/32" # peter
ip_whitelist:
- "212.121.131.106/32" # netgo berlin
- "149.233.6.129/32" # netgo e-shelter
- "46.245.219.98/32" # netgo borken
- "164.138.195.162/32" # netgo Aachen
ip_whitelist: "{{ ip_whitelist_netgo + [shared_service_network] + [shared_service_vpn_ip + '/32'] if shared_service_vpn_ip else ip_whitelist_netgo + [shared_service_network] }}"
- "{{ shared_service_network }}"
offsite_storage_server_ip: 142.132.155.83/32
# for test purposes (DEV-361)
# currently (2022.03.18) set to the IP of a Hetzner VM
gitlab_storage_server: 167.235.18.147/32
docker_owner: "{{ admin_user }}"
docker_group: "{{ admin_user }}"
docker_users: "{{ smardigo_platform_users }}"
docker_users: "{{ smardigo_plattform_users }}"
docker_compose_version: "1.29.2"
docker_compose_path: "/usr/bin/docker-compose"
service_base_path: "/etc/smardigo"
service_base_path: '/etc/smardigo'
devops_email_address: "nso.devops@netgo.de"
gitea_admin_email: "{{ devops_email_address }}"
lets_encrypt_email: "{{ devops_email_address }}"
connect_admin_email: "{{ devops_email_address }}"
keycloak_admin_email: "{{ devops_email_address }}"
pgadmin4_admin_email: "{{ devops_email_address }}"
grafana_admin_email: "{{ devops_email_address }}"
grafana_smardigo_email: "{{ devops_email_address }}"
harbor_oidc_admin_email: "{{ devops_email_address }}"
argocd_admin_email: "{{ devops_email_address }}"
wordpress_admin_email: "{{ devops_email_address }}"
gitea_admin_email: "nso.devops@netgo.de"
lets_encrypt_email: "nso.devops@netgo.de"
connect_admin_email: "nso.devops@netgo.de"
keycloak_admin_email: "nso.devops@netgo.de"
pgadmin4_admin_email: "nso.devops@netgo.de"
harbor_oidc_admin_email: "nso.devops@netgo.de"
http_port: "80"
https_port: "443"
@ -166,15 +157,14 @@ service_port_logstash: "5044"
service_port_postgres: "5432"
service_port_kibana: "5601"
service_port_cadvisor: "8080"
service_port_webdav: "8080"
service_port_keycloak: "8080"
service_port_iam: "8082"
service_port_sonarqube: "9000"
service_port_pgadmin: "9001"
service_port_phpmyadmin: "9002"
service_port_node_exporter: "9100"
service_port_blackbox_exporter: "9115"
service_port_elasticsearch: "9200"
service_port_wireguard: "51820"
monitor_port_system: "9082"
monitor_port_docker: "9083"
@ -186,13 +176,142 @@ monitor_port_postgres: "9087"
admin_port_service: "9081"
admin_port_traefik: "9080"
connect_id: "{{ inventory_hostname }}-connect"
connect_base_url: "{{ connect_id }}.{{ domain }}"
wordpress_id: "{{ inventory_hostname }}-wordpress"
wordpress_base_url: "{{ wordpress_id }}.{{ domain }}"
smardigo_auth_token_name: "Smardigo-User-Token"
filebeat_certificate: "{{ stage }}-elastic-stack-filebeat"
logstash_certificate: "{{ stage }}-elastic-stack-logstash-01"
backup_directory: "/backups"
get_current_date: "{{ lookup('pipe', 'date +%Y-%m-%d') }}"
get_current_date_time: "{{ lookup('pipe', 'date +%Y-%m-%d_%H:%M') }}"
blackbox_exporter_fqdn: "dev-blackbox-01.{{ domain }}"
blackbox_http_2xx_targets:
- 'https://{{ stage }}-keycloak-01.smardigo.digital/auth/'
#- 'https://{{ stage }}-management-01-connect.smardigo.digital/'
blackbox_http_2xx_additional_targets: []
prometheus_federation_enabled: true
kubernetes_prometheus_endpoint: "{{ stage }}-kube-prometheus.{{ domain }}"
backupuser_username: backupuser
backupuser_ssh_pubkey: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDAFRYAy3PqimYUWcO4Q9pdTvDQTsq7hKjWYoQEsJICnRRv+W+5d2lJvC3gqMpmWy9XxtrYePkVHCgIvfJSas9Jv7n7eeYoeWLWJq0nRSKg6EKFCH9y3v8tGPJQQf7wogOhHwr6m79c+lpNVUsVR+QOf76+47ZuwnuEBzK6xbDkmwyt7SPrJ59IFxOlmtz2HgVlTLczLalMygM4qlXqIt+lwuuFz4CsGcr4TwMKp9Uk6SCP3OV12oLnUUUOA3r72qmE4+JeUN6VNbXoBXEANfXm5kbM8w+dFhulCi1fQZCssB8PStA7Cs0gVqL6DYNUKRZaFL8e77hljGkPlOQDxOsBexPuceSDmmr6s5qT1wA6bnEFoeWbLlxixGlFA+1Q/LqWsYzoOZiTHDoaXvsc4VizlPp4Fn0OgJefPjuzBsWOyf0ob5oucfnmCAvEh/k+ioq0bIQDcliAM1UezitblHQgGHhqnKPMi664i0ULLiExARe4IV3KJiaG++RJyzUL5HNz3Qru+K5/pdj2jffluYTC4w+6ZYfjWEZS/DAumExv9T97kFOsapHCQJwTBa368Ch6uKkPCZO8p/ra3xTIUh/PibHaVCadgX2NR9q6jdiQtmc0SOyNJlMlPZD/Q1NrjXJ18ASny7gCBFItMyMtinVx9xQxQ+PFLB8oNYERw1ejIw== storage-server-smardigo'
current_date_time: "{{ lookup('pipe','date +%Y-%m-%d_%H:%M') }}"
hcloud_firewall_objects:
-
name: "{{ stage }}-default"
state: present
rules:
-
direction: in
protocol: icmp
port: ''
source_ips: '{{ ip_whitelist + ip_whitelist_admins }}'
destination_ips: []
description: ICMP allowed
-
direction: in
protocol: tcp
port: '22'
source_ips: '{{ ip_whitelist + ip_whitelist_admins }}'
destination_ips: []
description: SSH allowed
-
direction: in
protocol: tcp
port: '80'
source_ips: '{{ ip_whitelist + ip_whitelist_admins }}'
destination_ips: []
description: HTTP allowed
-
direction: in
protocol: tcp
port: '443'
source_ips: '{{ ip_whitelist + ip_whitelist_admins }}'
destination_ips: []
description: HTTPS allowed
-
direction: in
protocol: tcp
port: 'any'
source_ips: '{{ ip_whitelist_admins }}'
destination_ips: []
description: TCP - allow work from home without VPN
-
direction: in
protocol: udp
port: 'any'
source_ips: '{{ ip_whitelist_admins }}'
destination_ips: []
description: UDP - allow work from home without VPN
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }}'
-
name: "{{ stage }}-monitoring"
state: present
rules:
-
direction: in
protocol: tcp
port: '9080-9085'
source_ips: '{{ ip_whitelist + ip_whitelist_admins }}'
destination_ips: []
description: 'Server/Service Monitoring'
-
direction: in
protocol: tcp
port: '9001'
source_ips: '{{ ip_whitelist + ip_whitelist_admins }}'
destination_ips: []
description: 'PgAdmin'
-
direction: in
protocol: tcp
port: '9187'
source_ips: '{{ ip_whitelist + ip_whitelist_admins }}'
destination_ips: []
description: 'Postgres-Exporter'
-
direction: in
protocol: tcp
port: '80'
source_ips: '{{ ip_whitelist + ip_whitelist_admins }}'
destination_ips: []
description: 'AWX'
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }}'
-
name: "{{ stage }}-monitoring-extern-https"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips:
- "{{ lookup('community.general.dig', 'dev-blackbox-01.smardigo.digital' ) }}/32"
destination_ips: []
description: null
apply_to:
-
type: label_selector
label_selector:
selector: 'service=connect'
-
type: label_selector
label_selector:
selector: 'service=keycloak'
hetzner_authentication_ansible: "{{ hetzner_authentication_ansible_vault }}"
hetzner_authentication_ccm: "{{ hetzner_authentication_ccm_vault }}"
@ -202,22 +321,83 @@ k8s_basic_services:
- kubelet
- containerd
selfsigned_ca_private_key_passphrase: "{{ selfsigned_ca_private_key_passphrase_vault }}"
# Hetzner upstream DNS servers
upstream_dns_servers:
- 185.12.64.1
- 185.12.64.2
keycloak_admin_username: "keycloak-admin"
keycloak_admin_password: "{{ keycloak_admin_password_vault }}"
# Note: all dollar signs in the hash need to be doubled for escaping.
# To create user:password pair, it's possible to use this command:
# echo $(htpasswd -nb user password) | sed -e s/\\$/\\$\\$/g
traefik_admin_username: "traefik-admin"
traefik_admin_password_htpasswd: "{{ traefik_admin_password_htpasswd_vault }}"
mysql_root_username: "{{ mysql_root_username_vault }}"
mysql_root_password: "{{ mysql_root_password_vault }}"
lvm_volume_encryption: false
hcloud_firewall_objects_awx:
-
name: "{{ stage }}-awx-ssh-access-for-k8s-nodes"
state: present
rules:
-
direction: in
protocol: tcp
port: '22'
source_ips: "{{ src_ips }}"
destination_ips: []
description: null
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }}'
-
name: "{{ stage }}-awx-access-SMA-mgmt-instance"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ src_ips }}"
destination_ips: []
description: null
apply_to:
-
type: label_selector
label_selector:
selector: 'service=connect,tenant=management'
-
name: "{{ stage }}-awx-access-443-SMA-peripheral-instances"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips: "{{ src_ips }}"
destination_ips: []
description: null
apply_to:
-
type: label_selector
label_selector:
selector: 'service=gitea'
-
type: label_selector
label_selector:
selector: 'service=keycloak'
-
type: label_selector
label_selector:
selector: 'service=kibana'
hcloud_firewall_objects_backup:
-
name: "{{ stage }}-database-backup-ssh-access"
state: present
rules:
-
direction: in
protocol: tcp
port: '22'
source_ips:
- "{{ gitlab_storage_server }}"
destination_ips: []
description: null
apply_to:
-
type: label_selector
label_selector:
selector: 'service=postgres'
-
type: label_selector
label_selector:
selector: 'service=maria'

@ -1,37 +0,0 @@
---
# node exporter exposes data only into the private network
node_exporter_listen_address: "{{ stage_private_server_ip }}"
blackbox_http_2xx_targets:
- "{{ shared_service_url_gitea }}"
- "{{ shared_service_url_harbor }}"
- "{{ shared_service_url_keycloak }}/auth/"
- "{{ shared_service_url_kibana }}"
- "{{ shared_service_kube_url_awx }}"
blackbox_http_4xx_targets:
- "{{ shared_service_url_management }}"
prometheus_tsdb_rentention_time: "4w"
prometheus_federation_enabled: true
prometheus_remote_write_enabled: true
prometheus_alert_diskspaceusage_warning: 85
prometheus_alert_pg_replication_lag: 120
alertmanager_channel_smardigo: "#monitoring-{{ stage }}"
prometheus_enabled: true
prometheus_alertmanager_enabled: true
prometheus_grafana_enabled: true
prometheus_prom2teams_enabled: true
prometheus_service_names:
- "{{ (prometheus_enabled | default(true)) | ternary(prometheus_id, '') }}"
- "{{ (prometheus_alertmanager_enabled | default(true)) | ternary(alertmanager_id, '') }}"
- "{{ (prometheus_grafana_enabled | default(true)) | ternary(grafana_id, '') }}"
- "{{ (prometheus_prom2teams_enabled | default(true)) | ternary(prom2teams_id, '') }}"
prometheus_docker_volume_names:
- "{{ (prometheus_alertmanager_enabled | default(true)) | ternary(alertmanager_id + '-data', '') }}"
- "{{ (prometheus_grafana_enabled | default(true)) | ternary(grafana_id + '-data', '') }}"

@ -1,84 +1,4 @@
---
# TODO variable shouldn't be used in a global way
elastic_id: "{{ inventory_hostname }}-elastic"
# TODO variable shouldn't be used in a global way
elastic_exporter_id: "{{ inventory_hostname }}-elastic-exporter"
shared_service_url_harbor: "https://{{ shared_service_hostname_harbor }}"
shared_service_hostname_harbor: "{{ stage }}-harbor-01.{{ domain_env }}"
shared_service_url_gitea: "https://{{ shared_service_hostname_gitea }}"
shared_service_hostname_gitea: "{{ stage }}-gitea-01.{{ domain_env }}"
shared_service_url_keycloak: "https://{{ shared_service_hostname_keycloak }}"
shared_service_hostname_keycloak: "{{ stage }}-keycloak-01.{{ domain_env }}"
shared_service_url_kibana: "https://{{ shared_service_hostname_kibana }}"
shared_service_hostname_kibana: "{{ shared_service_kube_hostname_kibana }}"
shared_service_host_management: "{{ stage }}-management-01"
shared_service_url_management: "https://{{ shared_service_hostname_management }}"
shared_service_hostname_management: "{{ shared_service_host_management }}-connect.{{ domain_env }}"
shared_service_url_pgadmin4: "https://{{ shared_service_hostname_pgadmin4 }}"
shared_service_hostname_pgadmin4: "{{ stage }}-pgadmin4-01-pgadmin4.{{ domain_env }}"
shared_service_url_grafana: "https://{{ shared_service_hostname_grafana }}"
shared_service_hostname_grafana: "{{ stage }}-prometheus-01-grafana.{{ domain_env }}"
shared_service_hostname_logstash: "{{ stage }}-logstash.{{ domain_env }}"
shared_service_hostname_connect_data: "{{ stage }}-connect-data.{{ domain_env }}"
shared_service_hostname_logging_data: "{{ shared_service_elastic_stack_01_hostname }}"
# TODO check if it is still needed
shared_service_elastic_stack_01_hostname: "{{ stage }}-elastic-stack-elastic-01"
shared_service_elastic_stack_02_hostname: "{{ stage }}-elastic-stack-elastic-02"
shared_service_elastic_stack_03_hostname: "{{ stage }}-elastic-stack-elastic-03"
# TODO the blackbox exporter shouldn't be DEV tagged at all
shared_service_hostname_blackbox_exporter: "devnso-blackbox-01.smardigo.digital"
# use private loadbalancer ip for all kubernetes services
stage_kube: "{{ stage }}"
shared_service_kube_url_argocd: "https://{{ shared_service_kube_hostname_argocd }}"
shared_service_kube_hostname_argocd: "{{ stage_kube }}-argocd.{{ domain_env }}"
shared_service_kube_url_awx: "https://{{ shared_service_kube_hostname_awx }}"
shared_service_kube_hostname_awx: "{{ stage_kube }}-awx.{{ domain_env }}"
shared_service_kube_url_gitea: "https://{{ shared_service_kube_hostname_gitea }}"
shared_service_kube_hostname_gitea: "{{ stage_kube }}-gitea.{{ domain_env }}"
shared_service_kube_url_harbor: "https://{{ shared_service_kube_hostname_harbor }}"
shared_service_kube_hostname_harbor: "{{ stage }}-harbor.{{ domain_env }}"
shared_service_kube_url_kibana: "https://{{ shared_service_kube_hostname_kibana }}"
shared_service_kube_hostname_kibana: "{{ stage_kube }}-kibana.{{ domain_env }}"
shared_service_kube_url_prometheus: "https://{{ shared_service_kube_hostname_prometheus }}"
shared_service_kube_hostname_prometheus: "{{ stage_kube }}-prometheus.{{ domain_env }}"
shared_service_kube_url_grafana: "https://{{ shared_service_kube_hostname_grafana }}"
shared_service_kube_hostname_grafana: "{{ stage_kube }}-grafana.{{ domain_env }}"
shared_service_kube_jaeger_collector_hostname: "{{ stage_kube }}-jaeger-collector.{{ domain_env }}"
shared_service_kube_loadbalancer_public_ip_not_available: "public loadbalancer ip not available"
shared_service_kube_loadbalancer_public_ip: "{{ stage_public_ingress_loadbalancer_ip | default(shared_service_kube_loadbalancer_public_ip_not_available) }}"
shared_service_kube_loadbalancer_private_ip_not_available: "private loadbalancer ip not available"
shared_service_kube_loadbalancer_private_ip: "{{ stage_private_ingress_loadbalancer_ip | default(shared_service_kube_loadbalancer_private_ip_not_available) }}"
shared_service_loadbalancer_logstash_public_ip_not_available: "public logstash loadbalancer ip not available"
shared_service_loadbalancer_logstash_public_ip: "{{ stage_public_logstash_loadbalancer_ip | default(shared_service_loadbalancer_logstash_public_ip_not_available) }}"
shared_service_loadbalancer_logstash_private_ip_not_available: "private logstash loadbalancer ip not available"
shared_service_loadbalancer_logstash_private_ip: "{{ stage_private_logstash_loadbalancer_ip | default(shared_service_loadbalancer_logstash_private_ip_not_available) }}"
shared_service_default_additional_hosts:
- name: "{{ shared_service_kube_hostname_argocd }}"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
- name: "{{ shared_service_kube_hostname_awx }}"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
- name: "{{ shared_service_kube_hostname_gitea }}"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
- name: "{{ shared_service_kube_hostname_harbor }}"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
- name: "{{ shared_service_kube_hostname_kibana }}"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
- name: "{{ shared_service_kube_hostname_prometheus }}"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
- name: "{{ shared_service_kube_hostname_grafana }}"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
- name: "{{ shared_service_kube_jaeger_collector_hostname }}"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
- name: "{{ shared_service_hostname_logstash }}"
ip: "{{ shared_service_loadbalancer_logstash_private_ip }}"
- name: "{{ shared_service_hostname_connect_data }}"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
shared_service_additional_hosts: "{{ shared_service_default_additional_hosts + (shared_service_custom_additional_hosts | default([])) }}"
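
shared_service_additional_hosts is the merge point for per-stage extras: a stage only needs to define shared_service_custom_additional_hosts and the defaults stay intact. A minimal sketch of such an override (the host name here is hypothetical):

    # Hypothetical stage-level override: one extra hosts entry on top of the defaults
    shared_service_custom_additional_hosts:
      - name: "example-extra-host.{{ domain_env }}"
        ip: "{{ shared_service_kube_loadbalancer_private_ip }}"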

@ -1,33 +0,0 @@
---
elastic_elasticsearch_version: "7.16.3"
elastic_elasticsearch_exporter_version: "v1.5.0"
elastic_filebeat_version: "8.11.1"
elastic_kibana_version: "7.16.3"
elastic_logstash_version: "7.16.3"
elastic_metricbeat_version: "7.16.3"
gitea_version: "1.19"
prom_grafana_version: "9.1.5"
harbor_version: "v2.4.1"
keycloak_version: "21.0.2.7"
pgadmin4_version: "9.2"
prom_alertmanager_version: "v0.25.0"
prom_blackbox_exporter_version: "0.24.0"
prom_prometheus_version: "v2.44.0"
prom_prom2teams_version: "3.2.3" # TODO 4.2.1
traefik_version: "v2.10.3"
connect_version: "10.5.37"
iam_version: "10.5"
process_search_version: "1.3"
wordpress_image_version: "6.8.0-php8.2"
ansible_minimal_version: "2.12.0"
wireguard_version: latest
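
ansible_minimal_version is only useful if something enforces it. A minimal sketch of such a guard, assuming a standard assert task (not shown in this diff):

    # Illustrative guard: fail fast when the controller runs an older Ansible
    - name: Enforce the pinned minimum Ansible version
      ansible.builtin.assert:
        that:
          - ansible_version.full is version(ansible_minimal_version, '>=')
        fail_msg: "Ansible {{ ansible_minimal_version }} or newer is required, found {{ ansible_version.full }}"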

@ -1,8 +0,0 @@
---
shared_service_vpn: "{{ stage }}-vpn-01"
shared_service_vpn_ip: "{{ stage_server_infos
| selectattr('name', 'match', shared_service_vpn )
| map(attribute='public_ip')
| list
| first
| default('') }}"
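
The filter chain selects the server record whose name matches the VPN host and extracts its public IP; default('') covers stages without a VPN server. A worked example with hypothetical data:

    # Hypothetical stage_server_infos:
    #   - { name: "devnso-vpn-01",   public_ip: "203.0.113.10" }
    #   - { name: "devnso-gitea-01", public_ip: "203.0.113.11" }
    # With shared_service_vpn == "devnso-vpn-01":
    #   selectattr('name', 'match', ...) keeps the first record,
    #   map(attribute='public_ip') | list | first yields "203.0.113.10".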

@ -1,2 +0,0 @@
---
wordpress_image_name: "wordpress"

@ -1,13 +1,9 @@
---
# TODO needs to be removed after story DEV-361 is finished
hetzner_server_type: "{{ hetzner_server_type_bastelserver | default('cx22') }}"
hetzner_server_labels: "stage={{ stage }} service=backup"
hetzner_server_type: "{{ hetzner_server_type_bastelserver | default('cx21') }}"
hetzner_server_labels: "stage={{ stage }} service=bastelserver"
docker_enabled: false
traefik_enabled: false
filebeat_enabled: false
common_pip_dependencies: []
custom_platform_users:
- backuphamster
node_exporter_enabled: false

@ -1,18 +1,20 @@
---
hetzner_server_type: cx22
hetzner_server_type: cx21
hetzner_server_labels: "stage={{ stage }} service=connect{% if tenant_id is defined %} tenant={{ tenant_id }}{% endif %}"
# unique ID for a service; also used for service access management (e.g. the keycloak realm)
connect_client_id: "{{ cluster_name }}"
connect_postgres_host: "{{ shared_service_postgres_primary }}"
connect_postgres_host: "{{ shared_service_postgres_01_hostname }}"
connect_postgres_database: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}_connect"
connect_postgres_username: "{{ connect_postgres_database }}"
connect_postgres_password: "connect-postgres-admin"
connect_elastic_host: "{{ shared_service_connect_data_hostname }}"
connect_elastic_username: "{{ shared_service_connect_data_username }}"
connect_elastic_password: "{{ shared_service_connect_data_password }}"
#connect_process_search_module: "external"
connect_elastic_host: "{{ shared_service_elastic_stack_01_hostname }}"
connect_elastic_username: "{{ elastic_admin_username }}"
connect_elastic_password: "{{ elastic_admin_password }}"
connect_elastic_ca: "file:/usr/share/smardigo/ca.crt"
connect_elastic_prefix: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}"
@ -24,17 +26,17 @@ connect_auth_module: "oidc"
connect_oidc_client_id: "{{ cluster_name }}"
connect_oidc_client_secret: "{{ cluster_name }}"
connect_oidc_registration_id: "{{ cluster_name }}"
connect_oidc_issuer_uri: "https://{{ shared_service_hostname_keycloak }}/auth/realms/{{ current_realm_name }}"
connect_oidc_issuer_uri: "https://{{ shared_service_keycloak_hostname }}/auth/realms/{{ current_realm_name }}"
connect_password_change_url: "{{ http_s }}://{{ shared_service_hostname_keycloak }}/auth/realms/{{ current_realm_name }}/account/?referrer=security-admin-console#/security/signingin"
connect_user_management_url: "{{ http_s }}://{{ shared_service_hostname_keycloak }}/auth/admin/{{ current_realm_name }}/console"
connect_password_change_url: "{{ http_s }}://{{ shared_service_keycloak_hostname }}/auth/realms/{{ current_realm_name }}/account/password"
connect_iam_user_management_url: "{{ http_s }}://{{ shared_service_keycloak_hostname }}/auth/admin/{{ current_realm_name }}/console"
connect_mail_properties_simulation: false
connect_loglevel_message_queue: "DEBUG"
connect_loglevel_document_index: "DEBUG"
connect_loglevel_workflow_index: "DEBUG"
connect_loglevel_workflow_analysis: "DEBUG"
connect_csrf_token_name: "21f4d682-dbad-45e5-b3b5-47d274b9772d"
connect_csrf_token_value: "4d2ef8cc-f7d9-46d4-b4d6-f20f9dc48040"
process_search_postgres_host: "{{ shared_service_postgres_primary }}"
process_search_postgres_database: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}_process_search"
process_search_postgres_username: "{{ process_search_postgres_database }}"
process_search_postgres_password: "connect-postgres-admin"

@ -1,3 +0,0 @@
---
connect_datasource_action_enabled: "true"

@ -1,3 +0,0 @@
---
connect_search_elastic_enabled: "true"

@ -0,0 +1,3 @@
---
connect_webdav_enabled: "true"

@ -1,4 +0,0 @@
---
connect_wordpress_maria_database: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}_connect_wordpress"
connect_wordpress_maria_username: "{{ connect_wordpress_maria_database }}"
connect_wordpress_maria_password: "connect-wordpress-maria-admin"

@ -0,0 +1,14 @@
---
connect_wordpress_maria_host: "{{ shared_service_maria_hostname }}"
connect_wordpress_maria_database: "{{ stage }}_{{ tenant_id }}_{{ cluster_name }}_connect_wordpress"
connect_wordpress_maria_username: "{{ connect_wordpress_maria_database }}"
connect_wordpress_maria_password: "connect-wordpress-maria-admin"
connect_wordpress_oidc_issuer: "{{ http_s }}://{{ shared_service_keycloak_hostname }}/auth/realms/{{ current_realm_name }}"
connect_wordpress_oidc_provider_url: "{{ http_s }}://{{ shared_service_keycloak_hostname }}/auth/realms/{{ current_realm_name }}"
connect_wordpress_oidc_client_id: "{{ cluster_name }}"
connect_wordpress_oidc_client_secret: "{{ cluster_name }}"
connect_wordpress_buergerportal_username: "buergerportal"
connect_wordpress_buergerportal_password: "buergerportal"

@ -1,3 +0,0 @@
---
connect_workflow_heatmap_enabled: "true"

@ -1,6 +1,6 @@
---
hetzner_server_type: cpx31
hetzner_server_type: cx31
hetzner_server_labels: "stage={{ stage }} service=elastic"
traefik_enabled: false

@ -1,5 +1,6 @@
---
hetzner_server_type: cx22
hetzner_server_type: cx21
hetzner_server_labels: "stage={{ stage }} service=gitea"
gitea_id: "{{ inventory_hostname }}-gitea"
@ -7,7 +8,11 @@ gitea_postgres_id: "{{ inventory_hostname }}-postgres-gitea"
gitea_base_url: "{{ inventory_hostname }}.{{ domain }}"
gitea_postgres_host: "{{ shared_service_postgres_primary }}"
# unique ID for a service; also used for service access management (e.g. the keycloak realm)
gitea_client_id: "{{ cluster_name }}"
gitea_client_secret: "{{ cluster_name }}"
gitea_postgres_host: "{{ shared_service_postgres_01_hostname }}"
gitea_postgres_database: "{{ stage }}_gitea"
gitea_postgres_username: "{{ gitea_postgres_database }}"
gitea_postgres_password: "gitea-postgres-admin"

@ -0,0 +1,9 @@
---
hetzner_server_type: cx21
hetzner_server_labels: "stage={{ stage }} service=gw"
docker_enabled: false
traefik_enabled: false
filebeat_enabled: false
node_exporter_enabled: false

@ -1,77 +1,4 @@
---
hetzner_server_type: cpx31
hetzner_server_labels: "stage={{ stage }} service=harbor"
harbor_homedir: "/data"
harbor_pgdatadir_lvm_hcloudvol_size: 50
harbor_pgdatadir_lvm_hcloudvol_count: 1
harbor_pgdatadir_lvm_hcloudvol_mountpath: "{{ harbor_homedir }}"
filebeat_inputs:
- type: log
paths:
- /var/log/harbor/portal.log
fields:
harbor: true
harbor-component: harbor-portal
- type: log
paths:
- /var/log/harbor/exporter.log
fields:
harbor: true
harbor-component: harbor-exporter
- type: log
paths:
- /var/log/harbor/redis.log
fields:
harbor: true
harbor-component: redis
- type: log
paths:
- /var/log/harbor/registryctl.log
fields:
harbor: true
harbor-component: registryctl
- type: log
paths:
- /var/log/harbor/chartmuseum.log
fields:
harbor: true
harbor-component: chartmuseum
- type: log
paths:
- /var/log/harbor/trivy-adapter.log
fields:
harbor: true
harbor-component: trivy-adapter
- type: log
paths:
- /var/log/harbor/postgresql.log
fields:
harbor: true
harbor-component: harbor-db
- type: log
paths:
- /var/log/harbor/jobservice.log
fields:
harbor: true
harbor-component: harbor-jobservice
- type: log
paths:
- /var/log/harbor/proxy.log
fields:
harbor: true
harbor-component: nginx
- type: log
paths:
- /var/log/harbor/registry.log
fields:
harbor: true
harbor-component: registry
- type: log
paths:
- /var/log/harbor/core.log
fields:
harbor: true
harbor-component: harbor-core

@ -1,3 +1,4 @@
---
hetzner_server_type: cx22
hetzner_server_type: cx21
hetzner_server_labels: "stage={{ stage }} service=iam"

@ -1,11 +1,9 @@
---
ip: "{{ stage_private_server_ip | default('### use dynamic inventory ###') }}"
ip: "{{ stage_private_server_ip | default('-') }}"
### parameters used by kubespray ###
kube_version: v1.23.16
cloud_provider: external
kube_network_plugin: calico
kube_proxy_metrics_bind_address: 0.0.0.0:10249
@ -24,16 +22,5 @@ helm_enabled: true
# TODO configuration migration needed
#krew_enabled: true
kube_service_addresses: 10.233.0.0/18
kube_pods_subnet: 10.233.64.0/18
kube_network_node_prefix: 24
## Automatically renew K8S control plane certificates on first Monday of each month
auto_renew_certificates: true
# First Monday of each month
# auto_renew_certificates_systemd_calendar: "Mon *-*-1,2,3,4,5,6,7 03:{{ groups['kube_control_plane'].index(inventory_hostname) }}0:00"
unsafe_show_logs: true
additional_pip_dependencies:
- kubernetes
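
A quick capacity check on the CIDR choices above (plain arithmetic, no assumptions beyond the values shown):

    # kube_pods_subnet 10.233.64.0/18 carved into /24 node blocks:
    #   2^(24 - 18) = 64 nodes maximum
    # each /24 node block: 256 pod addresses
    # kube_service_addresses 10.233.0.0/18: 2^(32 - 18) = 16384 ClusterIP addresses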

@ -3,7 +3,7 @@
hetzner_server_type: cx11
hetzner_server_labels: "stage={{ stage }} service=keycloak"
keycloak_postgres_host: "{{ shared_service_postgres_primary }}"
keycloak_postgres_host: "{{ shared_service_postgres_01_hostname }}"
keycloak_postgres_database: "{{ stage }}_keycloak"
keycloak_postgres_username: "{{ keycloak_postgres_database }}"
keycloak_postgres_password: "keycloak-postgres-admin"

@ -1,8 +1,9 @@
---
hetzner_server_type: "{{ hetzner_server_type_kube_cpl | default('cpx21') }}"
hetzner_server_type: "{{ hetzner_server_type_kube_master | default('cpx21') }}"
hetzner_server_labels: "stage={{ stage }} service=kube_control_plane"
docker_enabled: false
traefik_enabled: false
filebeat_enabled: false
node_exporter_enabled: false

@ -6,3 +6,4 @@ hetzner_server_labels: "stage={{ stage }} service=kube_node"
docker_enabled: false
traefik_enabled: false
filebeat_enabled: false
node_exporter_enabled: false

@ -1,4 +1,5 @@
---
hetzner_server_labels: "stage={{ stage }} service=logstash"
traefik_enabled: false

@ -1,2 +1,53 @@
---
hetzner_server_type: cx22
hetzner_server_type: cx21
connect_image_version: "latest"
connect_admin_username: "{{ management_admin_username }}"
connect_admin_password: "{{ management_admin_password }}"
connect_workflow_env: "stage:{{ stage }};smardigoUserToken:{{ smardigo_auth_token_value }}"
connect_process_search_module: "external"
connect_oidc_client_secret: "{{ management_oidc_client_secret }}"
spring_profiles_include: "prod,postgres,elastic,swagger"
tenant_id: "{{ management_oidc_realm }}"
cluster_size: "1"
cluster_name: "{{ management_oidc_client_id }}"
current_realm_name: "management"
current_realm_display_name: "Stage Management"
postgres_acls:
- name: "{{ connect_postgres_database }}"
password: "{{ connect_postgres_password }}"
trusted_cidr_entry: "{{ shared_service_network }}"
current_realm_clients:
  - name: "{{ management_oidc_client_id }}"
    clientId: "{{ management_oidc_client_id }}"
    admin_url: ""
    root_url: ""
    redirect_uris:
      - "{{ http_s }}://{{ connect_base_url }}/*"
    secret: "{{ management_oidc_client_secret }}"
    web_origins:
      - "{{ http_s }}://{{ connect_base_url }}"
current_realm_users:
  - username: "{{ management_admin_username }}"
    password: "{{ management_admin_password }}"
    email: "{{ connect_admin_email }}"
    requiredActions: []
current_realm_admin_user:
username: "{{ management_realm_admin_username }}"
password: "{{ management_realm_admin_password }}"
email: "{{ connect_admin_email }}"
requiredActions: []

@ -7,9 +7,6 @@ mysql_databases: []
mysql_users: []
docker_enabled: false
traefik_enabled: false
filebeat_maria_enabled: true
custom_platform_users:
- '{{ backupuser_user_name }}'
filebeat_enabled: false

@ -0,0 +1,20 @@
---
hetzner_server_type: cx11
hetzner_server_labels: "stage={{ stage }} service=pdns"
pdns_id: "{{ inventory_hostname }}-pdns"
pdns_postgres_id: "{{ inventory_hostname }}-postgres-pdns"
pdns_recursor_id: "{{ inventory_hostname }}-recursor-pdns"
pdns_admin_id: "{{ inventory_hostname }}-admin-pdns"
pdns_admin_postgres_id: "{{ inventory_hostname }}-admin-postgres-pdns"
#pdns_api_key: "< see vault >"
pdns_postgres_host: "{{ shared_service_postgres_01_hostname }}"
pdns_postgres_database: "{{ stage }}_pdns"
pdns_postgres_username: "{{ pdns_postgres_database }}"
pdns_postgres_password: "pdns-postgres-admin"
pdns_admin_postgres_database: "{{ stage }}_pdns_admin"
pdns_admin_postgres_username: "{{ pdns_admin_postgres_database }}"
pdns_admin_postgres_password: "pdns-admin-postgres-admin"

@ -1,18 +1,10 @@
---
hetzner_server_type: cpx11
hetzner_server_labels: "stage={{ stage }} service=postgres role={{ server_type }}"
hetzner_server_labels: "stage={{ stage }} service=postgres"
postgres_acls: []
docker_enabled: false
traefik_enabled: false
filebeat_postgres_enabled: true
custom_platform_users:
- "{{ backupuser_user_name }}"
postgres_homedir: "/var/lib/postgresql"
postgres_pgdatadir_lvm_hcloudvol_size: 10
postgres_pgdatadir_lvm_hcloudvol_count: 1
postgres_pgdatadir_lvm_hcloudvol_mountpath: "{{ postgres_homedir }}"
filebeat_enabled: false

@ -1,3 +0,0 @@
---
shared_service_postgres_primary: "{{ stage }}-postgres01-01"
shared_service_postgres_secondary: "{{ stage }}-postgres01-02"

@ -1,3 +0,0 @@
---
shared_service_postgres_primary: "{{ stage }}-postgres02-01"
shared_service_postgres_secondary: "{{ stage }}-postgres02-02"

@ -1,3 +1,4 @@
---
hetzner_server_type: cx22
hetzner_server_type: cx21
hetzner_server_labels: "stage={{ stage }} service=prometheus"

@ -0,0 +1,11 @@
---
hetzner_server_type: cx11
hetzner_server_labels: "stage={{ stage }} service=redis"
docker_enabled: false
traefik_enabled: false
redis_bind_interface: 0.0.0.0
redis_maxmemory: '{{ (ansible_memtotal_mb * 0.8) | int }}'
redis_exporter_ip: "{{ ansible_ens10.ipv4.address | default('127.0.0.1') }}"
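
The parentheses in redis_maxmemory above matter: Jinja2 filters bind tighter than arithmetic, so without them the int filter applies to 0.8 alone and the whole expression evaluates to 0. Illustrative evaluation, assuming ansible_memtotal_mb == 4096:

    # {{ ansible_memtotal_mb * 0.8 | int }}   -> 4096 * int(0.8) = 4096 * 0 = 0
    # {{ (ansible_memtotal_mb * 0.8) | int }} -> int(3276.8)     = 3276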

@ -1,15 +0,0 @@
---
hetzner_server_type: "{{ hetzner_server_type_restore_database | default('cpx21') }}"
hetzner_server_labels: "stage={{ stage }} service=restore database_engine={{ database_engine | default('') }}"
docker_enabled: false
traefik_enabled: false
filebeat_enabled: false
node_exporter_enabled: false
custom_platform_users:
- "{{ backupuser_user_name }}"
# postgresql related
# defines the server type (master|slave|restore)
server_type: restore

@ -1,6 +0,0 @@
---
awx_admin_username: "awx-admin"
awx_admin_password: "{{ awx_admin_password_vault }}"
awx_hetzner_ansible_revision: "prodnso"
awx_custom_ee_image: "/prodnso/awx/awx-custom-ee"

@ -1,2 +0,0 @@
---
backup_lvm_hcloudvol_size: 20

@ -1,14 +0,0 @@
---
harbor_bootstrap_helm_url: "prodnso-harbor-01.smardigo.digital/infrastructure"
harbor_bootstrap_helm_name: "infrastructure"
harbor_bootstrap_username: "{{ harbor_bootstrap_username_vault }}"
harbor_bootstrap_password: "{{ harbor_bootstrap_password_vault }}"
gitea_bootstrap_url: "https://demompmx-gitea.smardigo.digital/demompmx/demompmx-argocd"
gitea_bootstrap_username: "{{ gitea_admin_username }}"
gitea_bootstrap_password: "{{ gitea_admin_password }}"
custom_ip_whitelist:
- "5.75.131.94"
- "116.203.156.144"
- "91.107.225.163"

@ -1,6 +0,0 @@
---
connect_ribbon_display: "demo only"
smardigo_default_theme: "/themes/mpm-x.json"
connect_name: "mpmX execution"
connect_language_code: "en"

@ -1,23 +0,0 @@
---
stage_database_management_connect_name: "{{ stage }}_infrastructure_management_connect"
stage_database_management_connect_password: "connect-postgres-admin"
stage_database_management_process_search_name: "{{ stage }}_infrastructure_management_process_search"
stage_database_management_process_search_password: "connect-postgres-admin"
stage_database_management_keycloak_name: "{{ stage }}_infrastructure_management_keycloak"
stage_database_management_keycloak_password: "keycloak-postgres-admin"
stage_database_management_gitea_name: "{{ stage }}_infrastructure_management_gitea"
stage_database_management_gitea_password: "gitea-postgres-admin"
stage_postgres_acls:
- name: "{{ stage_database_management_connect_name }}"
password: "{{ stage_database_management_connect_password }}"
trusted_cidr_entry: "{{ shared_service_network }}"
- name: "{{ stage_database_management_process_search_name }}"
password: "{{ stage_database_management_process_search_password }}"
trusted_cidr_entry: "{{ shared_service_network }}"
- name: "{{ stage_database_management_keycloak_name }}"
password: "{{ stage_database_management_keycloak_password }}"
trusted_cidr_entry: "{{ shared_service_network }}"
- name: "{{ stage_database_management_gitea_name }}"
password: "{{ stage_database_management_gitea_password }}"
trusted_cidr_entry: "{{ shared_service_network }}"
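
Assuming the postgres role consumes these entries to provision one login per database (the role internals are not part of this diff), a consumer would be shaped roughly like this:

    # Illustrative sketch, not repo code: one database user per ACL entry
    - name: Create one database user per ACL entry
      community.postgresql.postgresql_user:
        name: "{{ item.name }}"
        password: "{{ item.password }}"
      loop: "{{ stage_postgres_acls }}"
      no_log: true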

@ -1,170 +0,0 @@
---
hcloud_firewall_objects:
-
name: "{{ stage }}-default"
state: present
rules:
-
direction: in
protocol: icmp
port: ''
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: ICMP allowed
-
direction: in
protocol: tcp
port: '22'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: SSH allowed
-
direction: in
protocol: tcp
port: '80'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: HTTP allowed
-
direction: in
protocol: tcp
port: '443'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: HTTPS allowed
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }}'
-
name: "{{ stage }}-monitoring"
state: present
rules:
-
direction: in
protocol: tcp
port: '9080-9085'
source_ips: '{{ ip_whitelist + [ lookup("community.general.dig", stage + "-prometheus-01." + domain ) + "/32"] }}'
destination_ips: []
description: 'Server/Service Monitoring'
-
direction: in
protocol: tcp
port: '9001'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: 'PgAdmin'
-
direction: in
protocol: tcp
port: '9187'
source_ips: '{{ ip_whitelist }}'
destination_ips: []
description: 'Postgres-Exporter'
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }}'
-
name: "{{ stage }}-monitoring-extern-https"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips:
- "{{ lookup('community.general.dig', '{{ shared_service_hostname_blackbox_exporter }}' ) }}/32"
destination_ips: []
description: null
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=connect'
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=keycloak'
-
name: "{{ stage }}-access-to-kubernetes-api"
state: present
rules:
-
direction: in
protocol: tcp
port: '6443'
source_ips: "{{ ip_whitelist }}"
destination_ips: []
description: "Allow access for whitelisted ips"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=kube_control_plane'
-
name: "{{ stage }}-access-to-connect"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips:
- '0.0.0.0/0'
destination_ips: []
description: "Whitelisting ALL(also from UNTRUST) incoming HTTPS traffic for connect-instance(s)"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=connect'
hcloud_firewall_objects_keycloak:
-
name: "{{ stage }}-access-to-keycloak"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips:
- '0.0.0.0/0'
destination_ips: []
description: "Whitelisting ALL(also from UNTRUST) incoming HTTPS traffic for keycloak-instance(s))"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=keycloak'
hcloud_firewall_objects_cockpit:
-
name: "{{ stage }}-access-to-cockpit"
state: present
rules:
-
direction: in
protocol: tcp
port: '443'
source_ips:
- '0.0.0.0/0'
destination_ips: []
description: "Whitelisting ALL(also from UNTRUST) incoming HTTPS traffic for cockpit-instance(s))"
-
direction: in
protocol: tcp
port: '80'
source_ips:
- '0.0.0.0/0'
destination_ips: []
description: "Whitelisting ALL(also from UNTRUST) incoming HTTPS traffic for cockpit-instance(s))"
apply_to:
-
type: label_selector
label_selector:
selector: 'stage={{ stage }},service=cockpit'
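
These objects line up with the rule schema of the hetzner.hcloud firewall module (direction, protocol, port, source_ips, destination_ips, description). A minimal sketch of feeding them to that module, assuming the API token is wired up elsewhere and that attaching the apply_to label selectors is handled by a companion task:

    # Illustrative sketch: create/update the firewalls themselves
    - name: Ensure stage firewalls exist
      hetzner.hcloud.firewall:
        name: "{{ item.name }}"
        state: "{{ item.state }}"
        rules: "{{ item.rules }}"
      loop: "{{ hcloud_firewall_objects + hcloud_firewall_objects_keycloak + hcloud_firewall_objects_cockpit }}"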

@ -1,5 +0,0 @@
---
gitea_admin_username: "gitea-admin"
gitea_admin_password: "{{ gitea_admin_password_vault }}"
gitea_postgres_username: "gitea-postgres"
gitea_postgres_password: "{{ gitea_postgres_password_vault }}"

@ -1,4 +0,0 @@
---
grafana_signing_secret: "{{ grafana_signing_secret_vault }}"
grafana_admin_username: "grafana-admin"
grafana_admin_password: "{{ grafana_admin_password_vault }}"

@ -1,7 +0,0 @@
---
kubernetes_with_externaldns: true
kubernetes_with_certmanager: true
kubernetes_with_ingress: true
kubernetes_with_gitea: true
kubernetes_with_awx: true

@ -1,2 +0,0 @@
---
logstash_ssl_enabled: false

@ -1,2 +0,0 @@
---
connect_connection_harbor_url: "{{ shared_service_url_harbor }}/v2/prodnso/"

@ -1,34 +0,0 @@
---
stage: "demompmx"
hetzner_server_type_kube_cpl: cpx21
hetzner_server_type_kube_node: cpx31
# TODO read configuration with hetzner rest api
shared_service_network: "10.0.0.0/16"
netgo_msteams_hook_cd: "{{ netgo_msteams_hook_cd_vault }}"
netgo_msteams_hook_alerting: "{{ netgo_msteams_hook_alerting_vault }}"
pgadmin4_admin_username: "{{ pgadmin4_admin_email }}"
pgadmin4_admin_password: "{{ pgadmin4_admin_password_vault }}"
shared_service_hostname_gitea: "{{ shared_service_kube_hostname_gitea }}"
shared_service_hostname_harbor: "{{ shared_service_kube_hostname_harbor }}"
shared_service_iam_hostname: "{{ stage }}-iam-01.{{ domain_env }}"
shared_service_mail_hostname: "{{ stage }}-mail-01.{{ domain_env }}"
connect_jwt_enabled: true
connect_jwt_secret: "06aa5b66a2e241b7af934035df79e8a8"
iam_jwt_enabled: true
iam_jwt_secret: "b9bb2282a3284bf291173ef202928004"
harbor_admin_username: "{{ harbor_admin_username_vault }}"
harbor_admin_password: "{{ harbor_admin_password_vault }}"
shared_service_url_kibana: "{{ shared_service_kube_url_kibana }}"
shared_service_hostname_kibana: "{{ shared_service_kube_hostname_kibana }}"
elastic_admin_username: "{{ elastic_admin_username_vault }}"
elastic_admin_password: "{{ elastic_admin_password_vault }}"

@ -1,15 +0,0 @@
---
prometheus_admin_username: "prometheus-admin"
prometheus_admin_password: "{{ prometheus_admin_password_vault }}"
prometheus_admin_password_htpasswd: "{{ prometheus_admin_password_htpasswd_vault }}"
alertmanager_admin_username: "alertmanager-admin"
alertmanager_admin_password: "{{ alertmanager_admin_password_vault }}"
alertmanager_admin_password_htpasswd: "{{ alertmanager_admin_password_htpasswd_vault }}"
# federation for k8s prometheus -> stage prometheus
prometheus_federation_enabled: false
prometheus_alertmanager_enabled: false
prometheus_prom2teams_enabled: false
prometheus_grafana_enabled: false

@ -1,14 +0,0 @@
---
shared_service_url_harbor: "{{ shared_service_kube_url_harbor }}"
shared_service_url_keycloak: "https://{{ shared_service_hostname_keycloak }}"
shared_service_hostname_keycloak: "{{ stage }}-keycloak.{{ domain_env }}"
shared_service_custom_additional_hosts:
- name: "demompmx-connect-data.smardigo.digital"
ip: "{{ shared_service_kube_loadbalancer_private_ip }}"
iam_image_name: '{{ shared_service_hostname_harbor }}/prodnso/smardigo/iam-app'
connect_image_name: "{{ shared_service_hostname_harbor }}/prodnso/smardigo/connect-whitelabel-app"
process_search_image_name: "{{ shared_service_hostname_harbor }}/prodnso/smardigo/process-search"
wordpress_image_name: "{{ shared_service_hostname_harbor }}/prodnso/smardigo/wordpress"

@ -1,143 +0,0 @@
$ANSIBLE_VAULT;1.1;AES256
31373136616539393331636662663331616138323331363263383163383737633437343263376163
6635343834386131323435653332343039353338613037370a303037643066396434383930616537
32613337346437633136323630633031343064333830373630633437393061333431386661343431
3939633730623534610a373136393531316234303338663931663030303537376135396132356531
61323032353534646663356631376666356539353666333633633831306163656430373066323863
31653232343533633036613538333137343864326462653763633331323134653439353862623932
30623531313763343737313433356139383334306163383564623062323561616666653837303735
64356164633139623464323336323830626236373634373234663135366462393032653763313030
62303831616430663231356133333634616335636465373735383964383230393265636536623734
34393965356132356136306231303666616462303334303735343864616363616432663964666464
38343666326238363361666639323832356638636166396562633535393233306361363161373862
38323738353863653361616364643436646461356363666465323633386533396235333434393332
37323361373366376463666335303737356333613830306635383930626462646438623862613231
64636661303737313666353835343936366330366264643635376332396265626334626630393332
64346131386232353763326665356464336564306130323162363635323864623461613934373464
63373832363538663237666630373661633262353561316662656130386531396331383733313133
30313065393063353330353039336337303934326363636136616335316139663065666163313236
37613434663361626631623765373362663337613239393264323935383336336634656332373266
65313838323831623234393764363931333630636636383439303433626263326232663062323233
65383165636532663463636462626162656531333463313939666131386531363866343637343234
61336666303335376636626237343038633162356437323565623866633737326333653734363834
35376232373232323834643138303638306563333266323739363362653661356132613131633738
63373463623139313833313139386364326234303335656532306465633636313462376334353763
38343339393866636230323131613635626663363065336266623933323133336566393165323631
62616136363262383438636137633631303465343563633461356338333162373936366235663538
30343436623132646531643036336430626133643036313930373135326537323932623636313566
36653037313964616163313432313566383631626164626531346164663064626266346431336464
62353832313464616338323931333737303636663837653835333561373361346132646437343161
34356130343539636133306164346563373230616234323964393239393031333331646631326663
63353763626134666464373935323663336531656535633664623736643837346131346537383736
37623438613733393831373365376231383636636364363934323463646331663938353931333962
39393066656237326337353730623631636632346631616664323135313562333837373161643061
30316162396464323961653135306535396630646639383165643537616265303330373733383730
39633432633562656561663562383030333637336636663431383438653237306164653734646262
39353137363965386439643664353434356263373964356234376361303530613062663134666437
63333866306365343161373838666463303933666533616635323262633130346163656162653439
61383037373139663561366665313666643032613632393265326530616662306334346465613763
32336238613361643662396639613138336134636338396134366639623334336132393839653839
34323638323332663330323663613235313766663435356634376535316561643764613936646430
34336539616335633732363137366131353963373632356131616163623734663262616237643234
38373662383633303739303539623861346263333465303335346234666261633132323739346161
64313261396239313138343962376364656565653839333333383535663839343561323336646336
65343062393662323431303561376232616438623061643561333838663762393263333263373730
34326434656331376361653761376165643561326238376633366163646433373465393337666339
64326530643939663931386634626364636163396431396633326239323438366364643835383064
32633065323066346366616239623530663861336566613965306638613634366433663539383833
31373831333363323364633036356264653434633863656465303837396132343466303262663532
62303466396434303739373738303539616535633566343637333865623732323965656261313462
66316131613938623732313531346436323933646231363464393435663565396633343131393032
37363033356265383365646163636533313536623366316330653565303661333031633132353535
32363961653964383231633931353162396330366466343639663130623664333132336364353062
36653363356562303235623735653965313833616562646334333639363834633964313633626534
39373235363263316532366166383133396339383237316561363130383330623663663366316231
38653965313466346266333834616437613036363662376339323734643864363561383032646461
39383935643430373539613262623138373537653431323462623138356338353666353661656637
36353030633135353937316636333332303931326233393131316361663564353662656534353837
38643666636134353439633633616666646665633962343639363863323064326630386135633661
65303332633934373131663731353036626661653434386464306666616136313933326565313862
38333132316135376532343932653235393061653563623666363161363131383735633362306564
65633135373433613738613464366334343937323333363063316163303064383130353931316433
63366537306633633133336465343830383333316235666237613665316561346266313162616130
63333661303566616337343963353933376463363133656534616465643133653830613266326165
32326133626366376535303030383636653632656164373764366338386232336239653432336439
31633164393562633337306431366330623733393963373033366265353462376235353333393735
61313939306261376535353166666536383566373235643366636335336361623833653735326532
64656334376539316162623832316136663036353964313836613138343037393534663438653564
30303265346537616632343764373038303462326162646666386238386132386231313863323333
30373665373234316437333264356130613436653532396166376633343038346264373461323335
65633336376333376338343365653236386636353637306239363633363933613263396661303033
38646531653335313830666261376532313336336338613562313264616135646638346232656130
61613332393038323264636164653131393838393662393233343037663639363039356438386135
64356434313236306165346137633564636264653739323138666235333032393666376535323566
66356632386338343635633637313832373365653732303833303631643437616265393363343334
32356631386662613865353333303437326538626137346661396663356630663039643931656138
65306162333162336265656464623761636464643232303330643436656430626138353034653030
31366235626165636130363663633966396436366363336332353336613861656434393939626361
61393834396636386462613362383061343432343436343334626431633032613836626334313863
34333365613332336365636136393864306538333266626536326138623466656239376235316235
34636634333136633438623636613562306461626163393433376433373330323361613235336239
33626133636432303532623662653866303965313662363837353737663239393361643937366534
32323765303232346163303264353266376263333733623762656632643833366335636232633834
37636362376665623437623730373737326261303835643336336363356130633637343966353731
62366335363065626164613864663032313133643363366131643266306164653539303239303530
62636535653035336464353138653366653239376361353639646566343762333063623535636665
35373034313034613064343264643464656532343464343863373034616236306236336238656164
34303831663466643766383136373331623361316131363632343735313738383337626466626263
38386564346633303539343862396238656339666631333461626266616238363361306364323432
38663539666663646363323433613464323762336665643732306262393638663063326461653036
39653532373537646237363966386339613833383264396465613134306637656563636461383432
36303739613332623339626335393662656136663631373834656636393765633938336662646137
65323736336630643334633637616664663338346261336135313333383434326465306333326133
32656237616664663964313130376664316234396432353036386462376331623462636539383233
33323936663663636433313436393132353061373031376464646666343532353539306665396464
38623839306534333164623762653565653063363066633135326236393164393732646436313862
32383232653062336539633462353761633938316362626666373664376362613036653836633964
65623761633166643639333136313935663433353965306665643939633665303365333162383833
33396566326139373831666666353764316232373066383036343236303636353761323564623134
36376433626230303138326162373432393932333763393930623462333162396664396634316631
38376538323032656266653236373237643366326362303565313930663438656534383239306338
39386639643731366638616538363835633130343738626238336531343666663561656436306361
64343963373135376638646666363233646539333130313134636434633161383763356531663964
33643261383062363438323164623235346631646236623139323635346534386137306638636535
31303166373934613764663639303135616331663336353163376632346262646636636531346162
31656661333138373762353737383835613065613964613231663266333632383963396462653231
33363231343563613261626264316332633934616362363137386238303339643030386630393333
64306236333339626630383637643663306135643664326465306563323430373731313331316134
39356264316533323635306639643738346361386261333632643265303865336166326232373462
30653937396662623861643430633630373535366430343239376661623837336565616661323239
37373766353730323037643934633164323764656632393734626261333033353261636239613234
38343566393832643938643433366565663661343530353465323533306430306331306365323631
36373163616564336437336566643539333439306638393264373631663331323134303738663935
39386434393238333739326532353839653465323932663033313136386334316438656162303665
30346664613635373438663166656662636530313762636465316632333365643266373235343233
39363934366333353136306363306134336433623031663965653833303361363932636663343933
62313164616661633035646131663438336232343263626531316234336263633839346162363361
35326338643131633066313466343334643631616564373637333734386436343865326335616630
39363763316262613338623631343934386436666666313361356531316632616631383734636539
39303939613361383136313261613363653338646534343934326133626338353935363666353430
36393237376430313338663438323630343930313865666333336432626565356465363731376436
36336433376466313438306166343539653161666438663538343638306663653862383035326434
35386132333031353438633039333633346237336239306637653362653038313465313464316630
32666363376436653263353237633333643536613337313337306164663630316337656161643630
39303831336531303634323761636330636136373861343639366164643864306230616566643863
36363362333739666234363030663731386638366433323063316265663839393932366231613131
35386166333835643565613964613432633635386134663366323637666666333764333735303332
62353264663063636563616565306266623362656565356435306261653234313762373739373938
61306539633832623362653461316638636230393838313037663330386662303830346132356565
64376334353330663639633839326362633762306635613631303464633561613235666437373961
31333663663234333237623566393065323930643661373532633430313432666435366537633062
30633261383935383534333439346230656262393137616366316535616235616666653334306137
62333235323338323864656539633638316633366265616466386564303065356364386234386235
31323161326361383962396439353335376437623133396264333339326436646633386238643666
64333461623533393434366132333738356533326566353935653034633161643532653965383038
65346432626664656435343065373736343763343935656563303938353939313862633562623861
66306137656230336238626337653231306631356130623936393061353736656461613936343666
38383732383862323264373366663864656630343532393432306435663262333465323630656136
61616162396233303939356362396565323232643231343530326236333763316437366330633134
31386264633336376563363737353365653238643339373163643332336663306230353064653335
32343437346431366138393433613565653764336236336133643932646237353733383931356162
38623538363538633961623461336633386530666636373666653735643762353730366631646166
31646330353862383466376666633934643164616533346265633430383766616461633565313935
3535653434646433663366643238313734633737343437393561

@ -1,28 +0,0 @@
$ANSIBLE_VAULT;1.1;AES256
31636439396239346133316438386130633933623133663134646435666433363863333638656432
3039626563386234636566363537616131313435656231390a666139303739383833346333643765
34653761306661353039363463393836313335316266316239346430653864333361656164353336
3766393230626638380a636131356239303432376563353063316665383165636234343261313037
31626531653332306366323233323364323437306566633434353966666435366162366230633739
33663664303138303566393963653733396434616631363037653137343637623137346464656336
61643832353339633132363663353162666431633834353439666439356131373462366663343032
64303362626464396238363535343466633330616238326131376336353636366366363264366137
35616139363034346463303465653066376130376434633264636334363766646563623366643231
33363235306235366135623131366431336561363836386665346438393632653836346539653233
66626361383561333238353136333332353836646639363066376665363238653165303434633861
37373365386162663739343537373163313663373537616130666163373334376266353262306130
32373161663732396163613035383232356462376232323238343432303931663864366334383137
66313037333031306366653963373865366161356635633565346433656532303932336238393764
64323061636534666137346231666435666433316665303238633164303331636465663632653332
33616463326632643331666463353435366632396266663738616130363838343162613535656132
39333536393738373537386237376235356231393565666337393237343436646330613935666439
30666639636234613236623537393734366630626332636165376333663962336339643534306336
65663435363737343935366236346637343734633861333031393737383363646264373463363735
66323462333533613563636331393562616331333332616565653133353636353864303637663064
63666466353432386534383537613831313634323266366364373934313032383364333631373435
64323661373764643638613731326565633663326239306132616362323661653537643562366236
33376436646238663634356536353563343830313862393963333766353266386333323334653231
62383430623262626362383439313236343861316364646663303865316538373639613065636361
39393963366466623663396230373837343531613565643034666131356435313063636235623132
65303864383161326364666366343133326435663832633932633532323765323931643933636364
373236343364616432313762313133346165

@ -1,80 +0,0 @@
$ANSIBLE_VAULT;1.1;AES256
37313336363731346436616536386636623333646135306365353738323239366366303730363038
3336643362616238333336323339663362346238626135330a323765633533666338383965653839
66373236383165343637653839616333613039306661373731376139303134313333653265363133
3066373037383936620a613839373138376432393437346366666231323761663738366464613737
37366532613130323334356436306433666535396231323132363461643863643061366163306535
66323838343130633938656331346333636431323237343535353464393633656430343164336233
62393236646264656633336438323633626661656433616237363730383835386235393332353433
32643732336365646534383532666333363861626533386238363263333265366631663333383735
65396239313538326232663065393434383536616330663361663736326265333464313235616633
31636632663737343338333732356137643065643935376566626365633765336237393231666332
34373034376133613164663332613464303265633831343765373664346436343564366533613736
61646265353133333965663165663364303337306362623264363865613938623637376235343335
31336362396530343462633466303463663139323166623736396165323139323966333731656131
31346665666630663332386231376539636230646534666537336632626637323739313539303064
37363764383839623338666465333939653561626162346664376439666136373166616531303966
62626339353764353434343539626130616533386263356237623365396438663939323966373237
64343663336335363264623466326637633764363162626563313637623831643434663139643637
35326335363166383065663965653665623065356635303739663036313066356639353233333032
66636539353135343336333831393961336139353638616161626331366530303338626266666666
30343965376264633739623034646239663463653138303239366338613664633431343130323034
65333866363638613831303532316236626531393732613462396238343062383562303964396336
38323965316539393862306131656462616363383136396138663865386664383036623232666238
39373136336334643762316161303639613466326665396462393934373133626335653561326336
35616539643439383237313830646565396135393238363864323334333861323536623338303737
61356566643238376633613538353435623536353763383762383738613266643733373838616266
34303862643238363332323334383264333434663332303130643266306461386533313837303466
36323336353636326436353631633862653134313762346362343863363761303164616134396437
35386232386134303437363065646663306530633631613062656333373961393462613234666162
62393137396337616539393161323063396264353264613864636161393735386431666666393335
66656463633238633034383932316466646433373930313333616637636431623463353363366664
64336139393265353732313731633733383966353961363165666130663631326634306338366566
33323230323166376638376430623961663036623765313936646630306366356430653065313266
66326532306235343066643432303938393535613766643734396262643338333162363339636462
66613664383039323166336363383139386535326434383936393631653733323662313232656161
63663835626563326338303566313736323964363738353965313265616537623463346437363835
64356561366662353537653737336463333434333464316239326136353431303237366262316561
35363836396366396263636363656163323634643632333333393966323862373162383537336538
39626433393931393639396537633639633233633266313435373863306232316137396632306163
30343061643337663634313334366630633835653932663236353239626431303761323135633735
38316230353935363031626565646539336133396665333464366165656335333337623338303837
63646431646632356132303464303665623738613937653865623231333865343736633837333865
30373230646163626461373735616538623866313930623533323462356538633635623536323963
38343131636131343461393535393434336564663732646534653438396634353063653937613131
35333566383731646261393038306336643563346134653630636537633537623931303262633432
33373838663332353465363437376330393137633133346363323737383634373863643230346136
31616434386633363739306239343930313466613065656664373331366263373161396165346530
64663264343433333561353232663635353037353466636562366435666235643665613461626561
39336139326337626566623031653765613235396461363733643436376430666533353135306135
33623064396639343163303938636665653337303438626634313034306563626463636239373932
61393339323133356331616665306334323363643165333334353837396434633634643661396561
62363638373537316237393635616636633465303332623961343139383634396437636561633165
65306433363036366265396438613161306334303866343635346632613837653435626366316235
33303630373838303965636564636462326239313130636666303032303363313330643339393539
61626566663336353265303235366634393130626232653363316634666436663030336539353835
37376532633465356235393565313334343665316164353733613638333763633034336564663634
33313464333036613736653238353033323331326236376436346461643634383930383336656263
65363734366166336465393664626435653066303134346364323961613234346532366635633532
65663565613066643932316638373530393031643132613032333964333237636664346630633634
66323030646330653835633133343062663834333231343631656533616164633632316135333937
31393039303933396232383864626434393063306436663364333930336438363261333930653462
32303037353330363838376264303736306138316331313964666337393734656266316331353964
31646665336338653330336330616265383962613562376331613963613630613037396435356338
34373666323263656334636531633165326431366666393930306336633530333736393233363562
66666463623437373535363765386565366334633032373261613031613632656135333838356365
61326265393237356630336235316263313437393937333336393162343664346366303030376537
33353866353139613233353739353961323937643962393665643937393637343631373739333234
30303563356363316336333835373164376132623631393964633236326563663336663737333339
39303162626466326566383939373335616139353665366130616634343865303232373265386330
31363861303832363535323030636333316434376362356530653139373761663463616263656435
31396531343932646661626632316436366264333162353937303437373631646235336239666164
66616133653137316432666632326333343732356161633263333630633766303261323334663663
64613132633036626433633961383935313139383932613139656463356631646531613064343365
31306438313631333338333730356332636163383662336362633535356337613935343762346338
66663162363163623837343835643236333663396638643365626266653331363161623132383231
30623935396535613532343032313262663961396233323531373734336633376562386638386230
61623761303731313434383530386562363438303330613431366537326163353666323239303661
64366438363134323135326337633032376532393731343537373862666465333237306131393665
63376437386134323535623766366461303237303361613065343635663263386633653032316461
373863313430343636663931633062313736

@ -1,50 +0,0 @@
$ANSIBLE_VAULT;1.1;AES256
38316431626139323830383536383065376138333165316365383164376631633261653735353731
6365356664653964346465613031373135396463376331340a613661653335346639363631353765
36613038393562373964623738393532323763663261326239386536633034353862393263323132
3638613932303763660a396666366235336465303964663563376333656465373338653637326265
32303162363837313561393936316636326434666133313734623039363231343632356534363435
31346166636334386637656563323266633736373233333234636336613934393765383438633531
62633539393866323634363435326664653932363163653665303063393732363331303935663566
65363161393136376461313738373266646462303232613735363135646134376134393966633161
30626363303335636134386234363733396238633435636263386162613235636234353132613534
66306634623535313436393463363236633436613261333039663837383837393763373039623166
37623237666338623337383330346661623139373036613833653934333737383463396539616339
65633239363466613539326663383261346231383566373139386465613434316362653766383032
39376432303563323035346436633663343430643561636238633838663062623964643632383463
32336562343837626561326137353833326262356238393138373137336532356134643764656462
64386663666633353663623561393166613461643037656334393864666637356437343932353561
31356335333761623261313530343961326566623561393337653531386162346537383531303437
39663265656236626232333730663161363139353431396364353139633462373762366466326661
66303961316539363333323764656534633863393938363634353132303662343938333064333935
32373733333334636363313937366132303233393763326463656238366439353737303436366437
63663331376561663238623865366363396133653838363531653261353436353631633765353163
37383632366638393339356430393561323864636537613037626365346465613531336264336564
63373634626165613166643331353935306366653233336132303035393661636164646536303533
38616230373531393863316239656562333161613934353331653935663731353963333238373761
64356566373266386535366562613466636365623034373332613366383432663262303431313465
37356562376661623566356130613931353937366163356337616365323131653266623366663663
34636265316463343230623332303332303735313935393466326133323861616233393363393063
39626436303865663132663338653563326666643536663935353239623830363363303231363034
30633833653135363838376663656665383830383661383863356338376337323263386235323436
36383634306534353864316564363562623439306263326333333565343334326635346238333438
31666336636337326262666331653131323365343663373835386335326364653166326531393866
36303966353838353165316138303066313539326137666232613033623936326234356237613966
37313135393139333265383230376434373338633764633730653835363836373137346664613135
32346638303765323666646462393466653631363966333830333137616335346439346237343539
66646664316233663362396638303863623163313432363862306366623436653864373434383833
37353037643434666263663134613936303961393135363661363263373935356337636333353262
31306261356266623139653563666634656262303030326136633231366135653662646535643032
35366237666431393234633062346565363765666531646662623932323335303938653735393562
31353835396531663132656134383730396664363562636361633663383266623330326234613533
30326132303661346230313833643935323964346331656133323636326466393032663436346536
64386632373966656466623736366437346661353266303630396166366634326266366561643462
37653031653233653935346665313039373865346263376432303533623165363564306532383231
37656134333137343264333964343130343862323930616464353538646133376536373837636631
62386662666465646365383363666637313665353535343333613362373764326336306163633762
33303764396461653164393066333437303165303462343634313339366230656563346533333865
31303135313762656139386334633732363265623630303662346538346166373165383237633730
66326264653964373434353833323061383632303530363936623130613436643964346535316334
35373361353334643132353134663636313334623539346235626363373139646333616561353865
62306461333062306261613833643665303639373831333861633937303134653836343430386438
65326565313161653138336262666333323936653838386634333738343730353636

@ -1,10 +0,0 @@
$ANSIBLE_VAULT;1.1;AES256
62326464383361393762396164366461366133353033623562626630303131373064653166323932
3364343534333766353431363662636334323863323731370a333530633639393239303063323966
32616536373232366232653030393962336465383864303030613232353263333936626162636135
3864313839616130350a396663323033613532346535393262623731343439346262393734323934
38613833626130346330613033313366393938356132383232353265356162393561623738663463
32333665613766643835643135653862303934663539386235623432313038376337636565653064
34373635393865373461363538643264373335653330356133366438383234663035343639313731
61393038656463323437343564336435326162303835336265663035636433616664313539636338
3035

@ -1,10 +0,0 @@
$ANSIBLE_VAULT;1.1;AES256
32616238386334306434636635653633656664383664333533363965346230303566623330396464
3634303337393330646330656637636634386230663134620a323065336137653730666230326633
32336566636130356461373435663335363233376535303465383430313661373439626337363432
3536343531336163610a326137383063666266313863666566666534333130323663643761663866
64356266376230643138663834306263346339616164303265306365653864346264386561636230
30383761663566366132666563373066643566663662336661656531613165353030626437376539
32363363353131643435613934353935623832333334663139343739633938373936633937356463
32363435326334316531323934303438646561616238396331663938323330643739626562386639
3861

Some files were not shown because too many files have changed in this diff.
