---
# Example call:
#   ansible-playbook upload-database-dump.yml --ask-vault-pass \
#     -e "cluster_features=['connect',...] cluster_size='1' stage='dev' upload_file='dumps/import.sql' uploaded_file='import.sql'"
#
# How this works:
#   If `upload_file` is defined, the upload role saves the binary to `upload_directory` (default /tmp).
#   If `database_backup_file` and `target_database` are defined, the import role imports from
#   file basename `database_backup_file` into `target_database`.
#   If both role conditions match, the upload role triggers first.
#
# Parameters:
#   playbook inventory
#   stage                := the name of the stage (e.g. devnso, qanso, prodnso)
#   cluster_features     := services to set up (e.g. ['connect', 'wordpress', ...])
#   upload_file          := the local file to upload (e.g. dumps/wordpress_portal.sql)
#   database_backup_file := the dump file to import (e.g. wordpress_portal.sql)

#############################################################
# Creating inventory dynamically for given parameters
#############################################################
- name: 'apply setup to {{ host | default("maria") }}'
  # NOTE(review): "postgress" looks like a typo for "postgres", but this is a live
  # inventory host pattern — confirm against the inventory before renaming.
  hosts: '{{ host | default("postgress,maria") }}'
  # Roll out to at most `serial_number` hosts at a time (default 5).
  serial: "{{ serial_number | default(5) }}"
  become: true
  vars:
    ansible_ssh_host: "{{ stage_server_domain }}"
  pre_tasks:
    # Fail fast if the controller's Ansible is older than the minimum we support.
    - name: "Check if ansible version is at least {{ ansible_minimal_version }}"
      ansible.builtin.assert:
        that:
          - ansible_version.string is version(ansible_minimal_version, ">=")
        msg: "The ansible version has to be at least {{ ansible_minimal_version }}"
      tags:
        - always
  roles:
    # Uploads the local dump to the target host; file is owned by the backup user.
    - role: upload_local_file
      vars:
        upload_owner: "{{ backupuser_user_name }}"
        upload_group: "{{ backupuser_user_name }}"
        upload_file: "smardigo/wordpress_portal.sql.gz"
        uploaded_file: "backups/wordpress_portal.sql.gz"