---
- name: Data manipulation
  tags:
    - data_manipulation
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Return the input data's type
      tags: type_return
      ansible.builtin.set_fact:
        should_return_str: "{{ 'this' | type_debug }}"

    - name: Test types
      tags: type_test
      ansible.builtin.set_fact:
        # strings are classified as 'string', 'iterable' and 'sequence', but not 'mapping'
        aa_is_string: "{{ 'aa' is string }}"
        aa_is_iterable: "{{ 'aa' is iterable }}"
        aa_is_sequence: "{{ 'aa' is sequence }}"
        # numbers are classified as 'number', with 'integer' and 'float' being subclasses
        i42_is_number: "{{ 42 is number }}"
        i5_is_integer: "{{ 5 is integer }}"
        f21_34_is_number: "{{ 21.34 is number }}"
        f12_1_is_float: "{{ 12.1 is float }}"
        # lists are classified as 'iterable' and 'sequence', but not as 'string' nor 'mapping'
        list_is_iterable: "{{ ['list'] is iterable }}"
        list_is_sequence: "{{ ['list'] is sequence }}"
        list_is_string: "{{ ['list'] is string }}"
        list_is_mapping: "{{ ['list'] is mapping }}"
        # dictionaries are classified as 'iterable', 'sequence' and 'mapping', but not as 'string'
        dict_is_iterable: "{{ {'a': 'dict'} is iterable }}"
        dict_is_sequence: "{{ {'a': 'dict'} is sequence }}"
        dict_is_mapping: "{{ {'a': 'dict'} is mapping }}"
        dict_is_string: "{{ {'a': 'dict'} is string }}"
        # native booleans
        true_is_boolean: "{{ true is boolean }}"
        upper_true_is_boolean: "{{ True is boolean }}"
        false_is_boolean: "{{ false is boolean }}"
        upper_false_is_boolean: "{{ False is boolean }}"
        # null is None in ansible
        aa_is_not_null: "{{ 'aa' != None }}"
        aa_is_not_null_nor_empty: "{{ 'aa' not in [ None, '' ] }}"

    - name: Convert between types
      tags: type_conversion
      ansible.builtin.set_fact:
        string_to_int_is_integer: "{{ 'string' | int is integer }}"
        string_to_float_is_float: "{{ 'string' | float is float }}"
        integer_to_float_is_float: "{{ 12 | float is float }}"
        float_to_int_is_integer: "{{ 21.02 | int is integer }}"
        integer_to_string_is_string: "{{ 43 | string is string }}"
        float_to_string_is_string: "{{ 74.93 | string is string }}"
        integer_to_bool_is_boolean: "{{ 4 | bool is boolean }}"

    - name: Elvis operator
      tags:
        - elvis_operator
        - ternary
      # (condition) | ternary(value_for_true_condition, value_for_false_condition, optional_value_for_null_condition)
      ansible.builtin.set_fact:
        acme_directory: >-
          {{ this_is_a_test_run | default(true) | bool
          | ternary(
          'https://acme-staging-v02.api.letsencrypt.org/directory',
          'https://acme-v02.api.letsencrypt.org/directory'
          ) }}

    - name: Manipulate strings
      tags: string_manipulation
      ansible.builtin.set_fact:
        first_letter_to_uppercase: "{{ 'all_lowercase' | capitalize }}"
        something_replaced: "{{ 'dots.to.dashes' | replace('.','-') }}"
        split_string: "{{ 'testMe@example.com' | split('@') | first }}"
        pattern_replaced: >-
          {{ '*.domain.com...'
          | regex_replace('*' | regex_escape, 'star')
          | regex_replace('\.+$', '') }}

    - name: Manipulate lists
      tags: list_manipulation
      block:
        - name: Add elements to lists
          vars:
            programming_languages:
              - C
              - Python
          ansible.builtin.set_fact:
            programming_languages: "{{ programming_languages + ['Ruby'] }}"
        - name: Remove elements from lists
          vars:
            dbs_list: ['primary', 'sales']
          ansible.builtin.set_fact:
            list_without_items: "{{ dbs_list | difference(['template0','template1','postgres','rdsadmin']) }}"
        - name: Get a random element
          ansible.builtin.set_fact:
            random_item: "{{ ['a','b','c'] | random }}"
        - name: Sort dict elements in lists by attribute
          tags: order_by
          vars:
            snapshots:
              - name: sales
                create_time: '2024-06-25T00:52:55.127000+00:00'
              - name: test
                create_time: '2024-05-17T01:53:12.103220+00:00'
          ansible.builtin.set_fact:
            snapshot_latest: "{{ snapshots | sort(attribute='create_time') | last }}"
        - name: Give back the first non-null value
          tags: coalesce
          vars:
            list_with_null_values:
              - null
              - null
              - something
              - something else
          ansible.builtin.set_fact:
            first_non_null_value: "{{ list_with_null_values | select | first }}"
        - name: Get values for a specific attribute in a list of dictionaries
          ansible.builtin.set_fact:
            vpc_security_group_ids: >-
              {{ instance_information.vpc_security_groups | map(attribute='vpc_security_group_id') }}
            volume_ids: >-
              {{ instances_information.instances[0].block_device_mappings | map(attribute='ebs.volume_id') }}
        - name: Return only elements with specific attributes matching a filter
          ansible.builtin.set_fact:
            available_rds_snapshots: "{{ snapshots_list | selectattr('status', 'equalto', 'available') }}"
            mounts_with_path: "{{ ansible_facts.mounts | selectattr('mount', 'in', path) }}"
        - name: Return all elements *but* the ones with specific attributes matching a filter
          ansible.builtin.set_fact:
            available_rds_snapshots: "{{ snapshots_list | rejectattr('status', 'equalto', 'creating') }}"
            mounts_without_path: "{{ ansible_facts.mounts | rejectattr('mount', 'in', path) }}"
        - name: Remove lines about RDS protected users and permissions from a dump file
          # remove empty lines
          # remove comments
          # remove creation of the master user
          # remove anything involving 'rdsadmin'
          # remove changes to protected RDS users
          # remove protected 'superuser' and 'replication' assignments
          vars:
            # **Hack notice**: Ansible has issues with splitting on new lines if this template is quoted differently
            permissions_dump_content_as_lines: "{{ dump_file.content | ansible.builtin.b64decode | split('\n') }}"
            master_username: postgresql
          ansible.builtin.set_fact:
            permissions_commands: >-
              {{ permissions_dump_content_as_lines
              | reject('match', '^$')
              | reject('match', '^--')
              | reject('match', '^CREATE ROLE ' + master_username)
              | reject('match', '.*rdsadmin.*')
              | reject('match', '^(CREATE|ALTER) ROLE rds_')
              | map('regex_replace', '(NO)(SUPERUSER|REPLICATION)\s?', '') }}

    - name: Manipulate dictionaries
      tags: dictionary_manipulation
      vars:
        organization:
          address: 123 common lane
          id: 123abc
      block:
        - name: Add keys to dictionaries
          ansible.builtin.set_fact:
            organization: "{{ organization | combine({ 'name': 'ExampleOrg' }) }}"
        - name: Sort keys in dictionaries
          # NB: 'dictsort' returns a sorted list of [key, value] pairs, not a dictionary
          ansible.builtin.set_fact:
            organization: "{{ organization | dictsort }}"
        - name: Pretty print dictionaries
          ansible.builtin.set_fact:
            organization: "{{ organization | to_nice_json }}"
        - name: Merge dictionaries
          vars:
            dict_1:
              a: 43
              b: some string
            dict_2:
              y: true
              z:
                - 4
                - test
          ansible.builtin.set_fact:
            merged_dict: "{{ dict_1 | ansible.builtin.combine(dict_2, {'z':'new_value','w':[44]}) }}"
            recursively_merged_dict: >-
              {{ {'rest':'test'} | ansible.builtin.combine({'z':'newValue','w':[44]}, dict_1, dict_2, recursive=true) }}
        - name: Register the list of extensions per DB
          vars:
            db_extensions: {}
          ansible.builtin.set_fact:
            db_extensions: >-
              {{ db_extensions | combine({ item.item: item.query_result | map(attribute='extname') }) }}
          with_items: "{{ db_extensions_query.results }}"
        - name: Register the list of extensions per DB as 'db:extensions[]' pairs
          vars:
            db_extensions:
              sales:
                - pgaudit
                - plpgsql
              countries:
                - pgcrypto
                - postgis
                - pg_stat_statements
          ansible.builtin.set_fact:
            db_extension_pairs:
              # Refer https://jinja.palletsprojects.com/en/3.0.x/templates/#assignments for the namespace object's
              # reason
              >-
              {%- set ns = namespace(output = []) -%}
              {%- for db in db_extensions.keys() -%}
              {%- for extension in db_extensions[db] -%}
              {{- ns.output.append({'db':db, 'extension': extension}) -}}
              {%- endfor -%}
              {%- endfor -%}
              {{- ns.output -}}
        - name: Get the device name and last snapshot id for all block devices in an EC2 instance
          # Useful to create AMIs from instance snapshots
          tags:
            - aws
            - ec2
            - snapshot
            - ami
          ansible.builtin.set_fact:
            last_snap_for_device:
              # Refer https://jinja.palletsprojects.com/en/3.0.x/templates/#assignments for the namespace object's
              # reason
              >-
              {%- set ns = namespace(devices_list = []) -%}
              {%- for result in current_instance_snapshots.results -%}
              {%- for device in current_instance_information.instances[0].block_device_mappings
                  | selectattr('ebs.volume_id', 'equalto', result.volume_id) -%}
              {{- ns.devices_list.append({
                'device_name': device.device_name,
                'snapshot_id': result.snapshots | sort(attribute='start_time') | last | json_query('snapshot_id'),
              }) -}}
              {%- endfor -%}
              {%- endfor -%}
              {{ ns.devices_list }}

- name: Flow control
  tags:
    - flow_control
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  ignore_errors: true
  tasks:
    - name: Take pauses
      tags:
        - pause
        - sleep
      ansible.builtin.pause:
        seconds: 1

    - name: Enforce failures
      tags: failure
      ansible.builtin.fail:
        msg: Manually enforced failure

    - name: Fail task on any non-compliance
      tags: assertion
      vars:
        installation_method: package
        url: https://www.google.com/
      ansible.builtin.assert:
        that:
          - installation_method in ['container', 'package']
          - "'https://www.google.com/' is ansible.builtin.url"
          - "'domain.example.com' is community.general.fqdn_valid(min_labels=2)"
          - url is regex('\w\.com/')
        fail_msg: What to say if any of the above conditions fail
        success_msg: What to say if all of the above conditions succeed

    - name: Execute notified handlers now
      tags: handler
      ansible.builtin.meta: flush_handlers

    - name: Retry failing tasks
      tags:
        - failure
        - retry
      changed_when: false
      ansible.builtin.command: /usr/bin/false
      retries: 3
      delay: 1
      register: command_result
      until: command_result is not failed

    - name: Error handling in blocks
      tags: error_handling
      block:
        - name: This executes normally
          ansible.builtin.debug:
            msg: I execute normally
        - name: This errors out
          changed_when: false
          ansible.builtin.command: /bin/false
        - name: This never executes
          ansible.builtin.debug:
            msg: I never execute due to the above task failing
      rescue:
        - name: This executes if any errors arose in the block
          ansible.builtin.debug:
            msg: I caught an error and can do stuff here to fix it
      always:
        - name: This always executes
          ansible.builtin.debug:
            msg: I always execute

- name: Debugging
  tags:
    - debug
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  ignore_errors: true
  tasks:
    - name: Output messages
      ansible.builtin.debug:
        msg: I always display!
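
    # Not part of the original examples: a minimal, hedged sketch of dumping a previously registered result with the
    # 'debug' module. The variable name 'some_command_result' and its value are made up for illustration; any task's
    # 'register:' target can be inspected the same way.
    - name: Inspect a registered result (illustrative sketch)
      vars:
        some_command_result:
          rc: 0
          stdout: example output
      ansible.builtin.debug:
        var: some_command_result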

    - name: Pretty print messages
      vars:
        install_method: package
        supported_install_methods: [ 'package' ]
      ansible.builtin.debug:
        msg: >-
          {{ dict([
          [ 'install_method', install_method ],
          [ 'install_method in supported_install_methods', install_method in supported_install_methods ],
          ]) }}

    - name: Output variable values
      vars:
        install_method: package
      ansible.builtin.debug:
        var: install_method

    - name: Output messages depending on the verbosity level
      ansible.builtin.debug:
        msg: I only display with 'ansible-playbook -vvv' or with more 'v's
        verbosity: 3

    - name: Start the debugger on failure
      # print all variables at this point => p task_vars
      # continue => c
      # abort and quit => q
      debugger: on_failed
      ansible.builtin.fail:
        msg: Manually enforced failure

- name: Prompt for vars
  tags:
    - prompt
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  vars_prompt:
    # only works at playbook level
    - name: target_db
      prompt: Target DB
      private: false
      default: localhost
    - name: db_password
      prompt: DB password
      default: whatever
  tasks: []

- name: Run tasks on different targets than the current one
  # Only *single* target patterns allowed
  # No groups
  tags:
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Run on the controller
      delegate_to: 127.0.0.1
      connection: local
      changed_when: false
      ansible.builtin.command: hostname
    - name: Run on other targets
      delegate_to: copernicus
      changed_when: false
      ansible.builtin.command: hostname

- name: Reuse tasks
  tags:
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  tasks: []
  # - name: Statically use tasks from files
  #   when: false
  #   block:
  #     - name: By using absolute paths and special variables (preferred)
  #       ansible.builtin.import_tasks:
  #         file: "{{ role_path }}/tasks/install/{{ install_method }}.yml"
  #     - name: By using paths relative to the including file
  #       ansible.builtin.import_tasks:
  #         file: pre-flight.yml
  # - name: Conditionally include tasks
  #   block:
  #     - name: by leveraging the 'with_fileglob' loop filter (preferred)
  #       ansible.builtin.include_tasks:
  #         file: "{{ item }}"
  #       with_fileglob: "{{ install_method }}.yml"
  #     - name: by checking the files' existence
  #       vars:
  #         filename: "{{ install_method }}.yml"
  #       when: lookup('ansible.builtin.fileglob', filename) != []
  #       ansible.builtin.import_tasks:
  #         file: "{{ filename }}"
  # - name: Reuse playbooks
  #   # only works at playbook level
  #   vars:
  #     var_for_playbook_1: value1
  #   ansible.builtin.import_playbook: path/to/playbook.yml
  # - name: Apply roles
  #   hosts: localhost
  #   connection: local
  #   gather_facts: false
  #   roles:
  #     - role_name
  #     - path/to/role
  #     - role: role_name
  #     - role: role_with_vars
  #       vars:
  #         var1_for_role_with_vars: value
  #   tasks:
  #     - name: Apply a role now
  #       ansible.builtin.import_role:
  #         name: role_name

- name: Integrate Ansible Vault
  tags:
    - vault
    - never
  hosts: localhost
  connection: local
  gather_subset:
    - '!all'
    - min
  check_mode: true
  tasks:
    - name: Use encrypted values
      ansible.builtin.set_fact:
        var_from_encrypted_value:
          # password: '1q2w3e4r', plaintext value: 'very secret string'
          !vault |
            $ANSIBLE_VAULT;1.1;AES256
            34646464653830386631363430386432666530356364313532313336373665613038633464376335
            3539363530613130623638313063363165386230646566640a313438386133366137383939336637
            33333365393337326239336264623462373064383663363234353635316538356461353061646563
            3037306464363439340a663430313739393439363936613862316361353330363638323065383063
            39613935613035343637336537643266313737666635313730353034373736353736

    - name: Use encrypted files
      # The 'unvault' lookup plugin requires files to exist beforehand, but it is fine for them to be plaintext.
      # The 'file' lookup plugin requires files to exist beforehand, but decrypts vault-encrypted files.
      tags: tls_certificate
      ansible.builtin.copy:
        dest: /etc/haproxy/certificate.pem
        content: |
          {{ lookup('ansible.builtin.unvault', 'path/to/cert/key.pem') | string | trim }}
          {{ lookup('ansible.builtin.unvault', 'path/to/cert/full_chain.pem') | string | trim }}
        mode: '0700'

    - name: Save data to encrypted files
      # Of fu*king course the 'vault' filter would use the 'filter_default' vault ID by default to encrypt content.
      # Set that parameter to '' to *not* specify a vault ID.
      vars:
        ansible_vault_password: >-
          {{ lookup('ansible.builtin.file', [playbook_dir, 'ansible_vault_password_file.txt'] | path_join) }}
      ansible.builtin.copy:
        dest: path/to/file
        decrypt: false  # necessary if the file does not exist beforehand
        content: "{{ 'some string' | ansible.builtin.vault(ansible_vault_password, vault_id='') }}"
        mode: '0644'

- name: Common operations
  tags: never
  hosts: localhost
  connection: local
  gather_subset:
    - '!all'
    - min
  check_mode: true
  tasks:
    - name: Create directories recursively
      ansible.builtin.file:
        path: /tmp/path/to/final/dir
        state: directory
        mode: '0775'

    - name: Define files content in tasks
      ansible.builtin.copy:
        dest: "{{ ansible_user_dir }}/.tmux.conf"
        mode: u=rw,go=r
        content: |
          …

    - name: Generate passwords
      ansible.builtin.set_fact:
        random_password: "{{ lookup('ansible.builtin.password', '/dev/null') }}"
        random_password_with_requirements: >-
          {{ lookup('ansible.builtin.password', '/dev/null length=32 chars=ascii_letters,digits,punctuation') }}
        random_but_idempotent_password: >-
          {{ lookup('ansible.builtin.password', '/dev/null', seed=inventory_hostname, length=16) }}

    - name: Look for files
      ansible.builtin.set_fact:
        path_list_of_all_txt_files_in_dir: "{{ lookup('ansible.builtin.fileglob', '/my/path/*.txt') }}"

    - name: Add repositories
      block:
        - name: To DNF/YUM
          when: ansible_pkg_mgr | lower in [ 'dnf', 'yum' ]
          ansible.builtin.yum_repository:
            name: epel
            description: EPEL YUM repo
            baseurl: https://download.fedoraproject.org/pub/epel/$releasever/$basearch/

    - name: Install packages
      block:
        - name: Via package manager on any supported system
          ansible.builtin.package:
            name:
              - tmux
              - screen
        - name: Via PIP
          ansible.builtin.pip:
            name:
              - bottle
              - django>1.11.0,<1.12.0
              - svn+http://myrepo/svn/MyApp#egg=MyApp
              - git+http://myrepo/app/MyApp
              - file:///path/to/MyApp.tar.gz

    - name: Run containers
      block:
        - name: Directly
          community.docker.docker_container:
            name: gitlab
            image: gitlab/gitlab-ce:16.11.2-ce.0
            hostname: gitlab.lan
            published_ports:
              - "8022:22"
              - "8080:80"
              - "8443:443"
            env:
              GITLAB_OMNIBUS_CONFIG: >-
                external_url 'http://gitlab.lan';
            shm_size: 256m
            volumes:
              - ./config:/etc/gitlab:Z
              - ./logs:/var/log/gitlab:Z
              - ./data:/var/opt/gitlab:Z
            auto_remove: true
        - name: With Compose
          community.docker.docker_compose_v2:
            project_src: /home/user/flask

    - name: Send messages to Slack channels
      vars:
        slack_notification_hook_url: https://hooks.slack.com/services/AB01CD23EF4/ABCD0123E/aBcDefGh0123456789iJKLmn
      block:
        - name: Send plain messages
          ansible.builtin.uri:
            url: "{{ slack_notification_hook_url }}"
            method: POST
            body_format: json
            body:
              text: (╥╯ᗝ╰╥) task XYZ failed
        - name: Send mrkdwn (Slack-specific markdown) text
          # FIXME: still to be tested
          ansible.builtin.uri:
            url: "{{ slack_notification_hook_url }}"
            method: POST
            body_format: json
            body:
              blocks:
                - type: section
                  text:
                    type: mrkdwn
                    text: This is a *_fancy_* message

    - name: Use the users' home directory for something
      block:
        - name: Executing commands as the specified users
          block:
            - name: Get users' homedir back
              become: true
              become_user: "{{ item }}"
              become_flags: -iH
              check_mode: false
              ansible.builtin.command: >-
                echo "{{ item }}: $HOME"
              changed_when: false
              with_items:
                - root
                - ec2-user
              register: users_homedir_retrieve
            - name: Compute and register the results
              tags: AnsibleUnsafeText_to_Dict
              ansible.builtin.set_fact:
                users_homedir: >-
                  {{ users_homedir_retrieve
                  | community.general.json_query('results[].stdout')
                  | map('from_yaml')
                  | combine }}
            - name: Do your thing!
              become: true
              become_user: "{{ item.key }}"
              ansible.builtin.file:
                path: "{{ item.value }}/placeholder"
                state: touch
                mode: '0755'
              with_dict: "{{ users_homedir }}"
        - name: From the system's entries
          block:
            - name: Get raw information from the system's entries
              ansible.builtin.getent:
                database: passwd
                key: "{{ item }}"
                split: ':'
              with_items:
                - root
                - ec2-user
              register: users_entries
            - name: Compute and register the results
              ansible.builtin.set_fact:
                users_info: >-
                  {{ users_entries
                  | community.general.json_query('results[].ansible_facts.getent_passwd[]')
                  | combine }}
            - name: Do your thing!
              ansible.builtin.file:
                path: "{{ item.value[4] }}/placeholder"
                owner: "{{ item.key }}"
                state: touch
                mode: '0755'
              with_dict: "{{ users_info }}"

    - name: Cronjobs
      block:
        - name: At specific times
          become: true
          ansible.builtin.cron:
            name: Prometheus manual data backup
            cron_file: prometheus-manual-data-backup
            # Mind this is based on the hosts' time.
            hour: 4
            minute: 0
            user: root
            job:
              # - Keep '%' characters escaped or they'll be treated as newlines.
              # - Archive creation returns 1 if it detects changes to read files. Using ';' instead of '&&' to ignore.
              >
              FILENAME="/tmp/prometheus-data-$(date +'\%s-\%F-\%H-\%m-\%S').tar.gz" &&
              tar -czf "$FILENAME" '/var/lib/prometheus/data' ;
              tar -tf "$FILENAME" > '/dev/null' &&
              aws s3 cp "$FILENAME" 's3://backups/prometheus/' &&
              rm "$FILENAME"

- name: AWS-specific operations
  tags: never
  hosts: localhost
  connection: local
  gather_facts: false
  check_mode: true
  tasks:
    - name: Apply roles on different targets than the current one
      block: []
      # - name: Gather facts about the EC2 instance
      #   when: instance_information.instance_ids | length > 0
      #   delegate_to: "{{ instance_information.instance_ids | first }}"
      #   vars:
      #     ansible_connection: aws_ssm
      #     ansible_python_interpreter: /usr/bin/python3
      #   ansible.builtin.gather_facts: {}
      #   register: fact_gathering
      # - name: Apply the role to the EC2 instance
      #   when:
      #     - fact_gathering is not skipped
      #     - fact_gathering is not failed
      #   delegate_to: "{{ instance_information.instance_ids | first }}"
      #   delegate_facts: true
      #   vars:
      #     ansible_connection: aws_ssm
      #     ansible_aws_ssm_timeout: 900
      #     ansible_python_interpreter: /usr/bin/python3
      #     some_role_var: some value
      #     some_other_role_var: some value
      #   ansible.builtin.import_role:
      #     name: role name

    - name: Get the list of current public IP ranges
      # too many to be put into security group rules
      ansible.builtin.set_fact:
        ip_ranges: >-
          {{ lookup('url', 'https://ip-ranges.amazonaws.com/ip-ranges.json', split_lines=False) | from_json
          | json_query('prefixes')
          | selectattr('region', 'equalto', 'eu-west-1')
          | selectattr('service', 'equalto', 'AMAZON')
          | map(attribute='ip_prefix') }}

    - name: Assume roles
      block:
        - name: Get session tokens
          amazon.aws.sts_assume_role:
            access_key: AKIA1EXAMPLE1EXAMPLE  # optional if defined as environment variable
            secret_key: 123456789abcdefghijklmnopqrstuvwxyzABCDE  # optional if defined as environment variable
            profile: someProfile  # optional if defined as environment variable
            role_arn: "arn:aws:iam::123456789012:role/someRole"
            role_session_name: someRoleSession
          register: assumed_role
        - name: Use the assumed role to take action
          amazon.aws.ec2_tag:
            access_key: "{{ assumed_role.sts_creds.access_key }}"
            secret_key: "{{ assumed_role.sts_creds.secret_key }}"
            profile: null  # required to use the assumed role's token, if a profile is specified via environment variable
            session_token: "{{ assumed_role.sts_creds.session_token }}"
            resource: i-xyzxyz01
            tags:
              MyNewTag: value

    - name: EC2
      block:
        - name: Get running instances with 'K8S' as the 'Application' tag
          amazon.aws.ec2_instance_info:
            filters:
              "tag:Application": K8S
              instance-state-name: ["running"]
        - name: Clone EC2 instances
          vars:
            source_instance_id: i-0123456789abcdef0
          block:
            - name: Get instance information from the original instance
              amazon.aws.ec2_instance_info:
                instance_ids:
                  - "{{ source_instance_id }}"
              register: source_instance_info
            - name: Create an AMI of the original instance
              amazon.aws.ec2_ami:
                instance_id: "{{ source_instance_id }}"
                no_reboot: true  # remove if the instance rebooting upon AMI creation is no biggie
                wait: true
                wait_timeout: 3600  # big volumes call for big wait times (a 200GiB volume took )
                name: ami-source
              register: source_ami
            - name: Use the AMI to launch clones identical to the original
              when: source_ami.image_id is defined
              amazon.aws.ec2_instance:
                name: clone
                vpc_subnet_id: "{{ source_instance_info.instances[0].subnet_id }}"
                instance_type: "{{ source_instance_info.instances[0].instance_type }}"
                image:
                  id: "{{ source_ami.image_id }}"

    - name: RDS
      block:
        - name: Create an instance's snapshot
          block:
            - name: Create the snapshot
              amazon.aws.rds_instance_snapshot:
                db_instance_identifier: identifier-for-db-instance
                db_snapshot_identifier: identifier-for-db-snapshot
              register: snapshot_creation
            - name: Wait for the snapshot to be in the 'available' state
              when: snapshot_creation.snapshot_create_time is defined
              amazon.aws.rds_snapshot_info:
                db_snapshot_identifier: "{{ snapshot_creation.db_snapshot_identifier }}"
              register: snapshot_check
              retries: 3
              delay: 120
              until: snapshot_check.snapshots | selectattr('status', 'equalto', 'available') | list | length > 0
        - name: Dump roles' privileges
          block:
            - name: Dump to file
              environment:
                PGPASSWORD: someRandomString
              vars:
                out_file: /tmp/instance-id_roles.sql
              ansible.builtin.command: >-
                pg_dumpall
                --host 'instance-id.0123456789ab.eu-west-1.rds.amazonaws.com' --port '5432'
                --user 'postgres' --database 'postgres' --no-password
                --roles-only --no-role-passwords
                --file '{{ out_file }}'
              changed_when: false
            - name: Dump to variable for later use through 'dump_execution.stdout_lines'
              environment:
                PGPASSWORD: someRandomString
              ansible.builtin.command: >-
                pg_dumpall
                -h 'instance-id.0123456789ab.eu-west-1.rds.amazonaws.com' -p '5432'
                -U 'postgres' -l 'postgres' -w -r --no-role-passwords
              changed_when: false
              register: dump_execution
        - name: Wait for pending changes to be applied
          amazon.aws.rds_instance_info:
            db_instance_identifier: identifier-for-db-instance
          register: instance_check
          retries: 12
          delay: 15
          until: instance_check.instances[0].pending_modified_values.keys() | length == 0

    - name: Wait for AWS to realize some requests have been made
      ansible.builtin.pause:
        seconds: 60

- name: "AWS: Start stopped instances and add the first of them to the inventory for the next play"
  # works at playbook level
  tags:
    - never
  hosts: localhost
  connection: local
  check_mode: false
  handlers:
    - name: Add the first started host
      ansible.builtin.add_host:
        groups:
          - tag_Application_Postgres
          - tag_Component_Dumper
        name: "{{ started_instances.instance_ids[0] }}"
    - name: Add all started hosts with host variables
      loop: "{{ started_instances.instance_ids }}"
      ansible.builtin.add_host:
        groups:
          - tag_Application_Postgres
          - tag_Component_Dumper
        name: "{{ item }}"
        ansible_connection: aws_ssm
        ansible_aws_ssm_bucket_name: company-ssm-logs
        ansible_aws_ssm_region: eu-west-1
        ansible_aws_ssm_timeout: 900
  tasks:
    - name: Start the PG dumper instance
      tags: dumper
      amazon.aws.ec2_instance:
        filters:
          tag:Application: Postgres
          tag:Component: Dumper
        state: started
      register: started_instances
      notify:
        - Add the first started host
        - Add all started hosts with host variables
      # follow up with a play using hosts 'tag_Application_Postgres,&tag_Component_Dumper'

- name: AWX-specific operations
  tags: never
  hosts: localhost
  connection: local
  gather_facts: false
  check_mode: true
  environment:
    CONTROLLER_HOST: https://awx.example.org/
    CONTROLLER_VERIFY_SSL: false
    CONTROLLER_USERNAME: admin
    CONTROLLER_PASSWORD: somethingSecret
  tasks:
    - name: Export all data from existing instances
      # At the time of writing: applications, credential_types, credentials, execution_environments, inventory,
      # inventory_sources, job_templates, notification_templates, organizations, projects, schedules, teams, and users.
      awx.awx.export:
        all: true
      register: awx_export_output

- name: GitLab-specific operations
  tags: never
  hosts: localhost
  connection: local
  gather_facts: false
  check_mode: true
  tasks:
    - name: Install configured fleeting plugins
      when: runner_executor in [ "docker-autoscaler", "instance" ]
      become: true
      ansible.builtin.command:
        chdir: /root
        cmd: gitlab-runner fleeting install
        creates: /root/.config/fleeting/plugins

- name: Let's Encrypt-specific operations
  tags: never
  hosts: localhost
  connection: local
  gather_facts: false
  check_mode: true
  tasks:
    # The 'acme_certificate' module takes in file paths for the certificate's files; those need either to *not* exist
    # beforehand, or their content to be in specific formats.
    - name: Revoke test certificates with account key
      community.crypto.acme_certificate_revoke:
        acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
        acme_version: 2
        account_key_src: path/to/acme_account.key.pem
        certificate: path/to/certificate.crt.pem
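
    # Hedged sketch, not part of the original file: the two-step flow documented for
    # 'community.crypto.acme_certificate' (request the challenges, fulfil one, then call the module again with the
    # registered data to retrieve the certificate). The domain 'www.example.com', the webroot '/var/www/html' and all
    # 'path/to/...' files are illustrative assumptions.
    - name: Request the challenges for a certificate
      community.crypto.acme_certificate:
        acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
        acme_version: 2
        terms_agreed: true
        account_key_src: path/to/acme_account.key.pem
        csr: path/to/www.example.com.csr.pem
        dest: path/to/www.example.com.crt.pem
        challenge: http-01
      register: example_com_challenge
    - name: Fulfil the 'http-01' challenge by publishing the expected file under the webroot
      when: example_com_challenge is changed and 'www.example.com' in example_com_challenge.challenge_data
      ansible.builtin.copy:
        dest: "/var/www/html/{{ example_com_challenge.challenge_data['www.example.com']['http-01']['resource'] }}"
        content: "{{ example_com_challenge.challenge_data['www.example.com']['http-01']['resource_value'] }}"
        mode: '0644'
    - name: Let the CA validate the challenge and retrieve the certificate
      when: example_com_challenge is changed
      community.crypto.acme_certificate:
        acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
        acme_version: 2
        terms_agreed: true
        account_key_src: path/to/acme_account.key.pem
        csr: path/to/www.example.com.csr.pem
        dest: path/to/www.example.com.crt.pem
        fullchain_dest: path/to/www.example.com.fullchain.pem
        challenge: http-01
        data: "{{ example_com_challenge }}"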