# Provenance: mirror of https://gitea.com/mcereda/oam.git
# (Ansible playbook examples; original file ~1185 lines, YAML)
---
|
|
|
|
- name: Data manipulation
  tags:
    - data_manipulation
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Return the input data's type
      tags: type_return
      ansible.builtin.set_fact:
        should_return_str: "{{ 'this' | type_debug }}"

    - name: Test types
      tags: type_test
      ansible.builtin.set_fact:
        # strings are classified as 'string', 'iterable' and 'sequence', but not 'mapping'
        aa_is_string: "{{ 'aa' is string }}"
        aa_is_iterable: "{{ 'aa' is iterable }}"
        aa_is_sequence: "{{ 'aa' is sequence }}"
        # numbers are classified as 'numbers', with 'integer' and 'float' being subclasses
        i42_is_number: "{{ 42 is number }}"
        i5_is_integer: "{{ 5 is integer }}"
        f21_34_is_number: "{{ 21.34 is number }}"
        f12_1_is_float: "{{ 12.1 is float }}"
        # lists are classified as 'iterable' and 'sequence', but not as 'string' nor 'mapping'
        list_is_iterable: "{{ ['list'] is iterable }}"
        list_is_sequence: "{{ ['list'] is sequence }}"
        list_is_string: "{{ ['list'] is string }}"
        list_is_mapping: "{{ ['list'] is mapping }}"
        # dictionaries are classified as 'iterable', 'sequence' and 'mapping', but not as 'string'
        dict_is_iterable: "{{ {'a': 'dict'} is iterable }}"
        dict_is_sequence: "{{ {'a': 'dict'} is sequence }}"
        dict_is_mapping: "{{ {'a': 'dict'} is mapping }}"
        dict_is_string: "{{ {'a': 'dict'} is string }}"
        # native booleans
        true_is_boolean: "{{ true is boolean }}"
        upper_true_is_boolean: "{{ True is boolean }}"
        false_is_boolean: "{{ false is boolean }}"
        upper_false_is_boolean: "{{ False is boolean }}"
        # null is None in ansible
        aa_is_not_null: "{{ 'aa' != None }}"
        aa_is_not_null_nor_empty: "{{ 'aa' not in [ None, '' ] }}"

    - name: Convert between types
      tags: type_conversion
      ansible.builtin.set_fact:
        string_to_int_is_integer: "{{ 'string' | int is integer }}"
        string_to_float_is_float: "{{ 'string' | float is float }}"
        integer_to_float_is_float: "{{ 12 | float is float }}"
        float_to_int_is_integer: "{{ 21.02 | int is integer }}"
        integer_to_string_is_string: "{{ 43 | string is string }}"
        float_to_string_is_string: "{{ 74.93 | string is string }}"
        integer_to_bool_is_boolean: "{{ 4 | bool is boolean }}"

    - name: Test truthfulness
      tags: truthfulness
      ansible.builtin.set_fact:
        this_is_true: true
        this_is_false: false
        this_is_true_again: "{{ not false }}"
        true_is_truthy: "{{ true is truthy }}"
        false_is_falsy: "{{ false is falsy }}"

    - name: Elvis operator
      tags:
        - elvis_operator
        - ternary
      # (condition) | ternary(value_for_true_condition, value_for_false_condition, optional_value_for_null_condition)
      ansible.builtin.set_fact:
        acme_directory: >-
          {{
            this_is_a_test_run
            | default(true)
            | bool
            | ternary(
              'https://acme-staging-v02.api.letsencrypt.org/directory',
              'https://acme-v02.api.letsencrypt.org/directory'
            )
          }}

    - name: Manipulate strings
      tags: string_manipulation
      vars:
        # NOTE(review): the leading escape sequences below look mangled by text
        # extraction (presumably '\u001b]0;…\u0007' terminal-title escapes);
        # confirm against the upstream file before relying on them.
        module_output: >-
          u001b]0;@smth:/u0007{
          "failed": 0, "started": 1, "finished": 0, "ansible_job_id": "j968817333249.114504",
          "results_file": "/home/ssm-user/.ansible_async/j968817333249.114504", "_ansible_suppress_tmpdir_delete": true
          }\r\r
        pattern: >-
          {{ '"failed": 0, "started": 1, "finished": 0' | regex_escape() }}
      ansible.builtin.set_fact:
        first_letter_to_uppercase: "{{ 'all_lowercase' | capitalize }}"
        something_replaced: "{{ 'dots.to.dashes' | replace('.','-') }}"
        split_string: "{{ 'testMe@example.com' | split('@') | first }}"
        pattern_replaced: >-
          {{ '*.domain.com...' | regex_replace('*' | regex_escape, 'star') | regex_replace('\.+$', '') }}
        pattern_is_anywhere_in_module_output: "{{ module_output is search(pattern) }}"
        pattern_is_at_the_beginning_of_string: "{{ 'sator arepo tenet opera rotas' is match('sator arepo') }}"
        regex_is_anywhere_in_string: "{{ 'sator arepo tenet opera rotas' is regex('\\stenet\\s') }}"
        first_substr_matching_regex: "{{ 'sator arepo tenet opera rotas' | regex_search('\\stenet\\s') }}"
        value_from_json_string_in_module_output: >-
          {{ 'ansible_job_id' | extract(module_output | regex_search('{.*}') | from_json) }}

    - name: Manipulate lists
      tags: list_manipulation
      block:
        - name: Add elements to lists
          vars:
            programming_languages:
              - C
              - Python
          ansible.builtin.set_fact:
            programming_languages: "{{ programming_languages + ['Ruby'] }}"

        - name: Remove elements from lists
          vars:
            dbs_list: ['primary', 'sales']
          ansible.builtin.set_fact:
            list_without_items: "{{ dbs_list | difference(['template0','template1','postgres','rdsadmin']) }}"

        - name: Get a random element
          ansible.builtin.set_fact:
            random_item: "{{ ['a','b','c'] | random }}"

        - name: Sort dict elements in lists by attribute
          tags: order_by
          vars:
            snapshots:
              - name: sales
                create_time: '2024-06-25T00:52:55.127000+00:00'
              - name: test
                create_time: '2024-05-17T01:53:12.103220+00:00'
          ansible.builtin.set_fact:
            snapshot_latest: "{{ snapshots | sort(attribute='create_time') | last }}"

        - name: Give back the first not null value
          tags: coalesce
          vars:
            list_with_null_values:
              - null
              - null
              - something
              - something else
          ansible.builtin.set_fact:
            # 'select' with no test keeps only truthy items, so nulls drop out
            first_non_null_value: "{{ list_with_null_values | select | first }}"

        - name: Get values for a specific attribute in a list of dictionaries
          ansible.builtin.set_fact:
            vpc_security_group_ids: >-
              {{ instance_information.vpc_security_groups | map(attribute='vpc_security_group_id') }}
            volume_ids: >-
              {{ instances_information.instances[0].block_device_mappings | map(attribute='ebs.volume_id') }}

        - name: Return only elements with specific attributes matching a filter
          ansible.builtin.set_fact:
            available_rds_snapshots: "{{ snapshots_list | selectattr('status', 'equalto', 'available') }}"
            mounts_with_path: "{{ ansible_facts.mounts | selectattr('mount', 'in', path) }}"

        - name: Return all elements *but* the ones with specific attributes matching a filter
          ansible.builtin.set_fact:
            available_rds_snapshots: "{{ snapshots_list | rejectattr('status', 'equalto', 'creating') }}"
            mounts_without_path: "{{ ansible_facts.mounts | rejectattr('mount', 'in', path) }}"

        - name: Remove lines about RDS protected users and permissions from a dump file
          # remove empty lines
          # remove comments
          # remove creation of the master user
          # remove anything involving 'rdsadmin'
          # remove changes to protected RDS users
          # remove protected 'superuser' and 'replication' assignments
          vars:
            # **Hack notice**: Ansible has issues with splitting on new lines if this template is quoted differently
            permissions_dump_content_as_lines: "{{ dump_file.content | ansible.builtin.b64decode | split('\n') }}"
            master_username: postgresql
          ansible.builtin.set_fact:
            permissions_commands: >-
              {{
                permissions_dump_content_as_lines
                | reject('match', '^$')
                | reject('match', '^--')
                | reject('match', '^CREATE ROLE ' + master_username)
                | reject('match', '.*rdsadmin.*')
                | reject('match', '^(CREATE|ALTER) ROLE rds_')
                | map('regex_replace', '(NO)(SUPERUSER|REPLICATION)\s?', '')
              }}

    - name: Manipulate dictionaries
      tags: dictionary_manipulation
      vars:
        organization:
          address: 123 common lane
          id: 123abc
      block:
        - name: Add keys to dictionaries
          ansible.builtin.set_fact:
            organization: "{{ organization | combine({ 'name': 'ExampleOrg' }) }}"

        - name: Sort keys in dictionaries
          ansible.builtin.set_fact:
            organization: "{{ organization | dictsort }}"

        - name: Pretty print dictionaries
          ansible.builtin.set_fact:
            organization: "{{ organization | to_nice_json }}"

        - name: Merge dictionaries
          vars:
            dict_1:
              a: 43
              b: some string
            dict_2:
              y: true
              z:
                - 4
                - test
          ansible.builtin.set_fact:
            # FIX: was "{{ dict1 | … }}" — 'dict1' is undefined; the var declared
            # above is 'dict_1'
            merged_dict: "{{ dict_1 | ansible.builtin.combine(dict_2, {'z':'new_value','w':[44]}) }}"
            recursively_merged_dict: >-
              {{ {'rest':'test'} | ansible.builtin.combine({'z':'newValue','w':[44]}, dict_1, dict_2, recursive=true) }}

        - name: Register the list of extensions per DB
          vars:
            db_extensions: {}
          ansible.builtin.set_fact:
            db_extensions: >-
              {{
                db_extensions
                | combine({
                  item.item: item.query_result | map(attribute='extname')
                })
              }}
          with_items: "{{ db_extensions_query.results }}"

        - name: Register the list of extensions per DB as 'db:extensions[]' pairs
          vars:
            db_extensions:
              sales:
                - pgaudit
                - plpgsql
              countries:
                - pgcrypto
                - postgis
                - pg_stat_statements
          ansible.builtin.set_fact:
            # Refer https://jinja.palletsprojects.com/en/3.0.x/templates/#assignments
            # for the namespace object's reason
            db_extension_pairs: >-
              {%- set ns = namespace(output = []) -%}
              {%- for db in db_extensions.keys() -%}
              {%- for extension in db_extensions[db] -%}
              {{- ns.output.append({'db':db, 'extension': extension}) -}}
              {%- endfor -%}
              {%- endfor -%}
              {{- ns.output -}}

        - name: Get the device name and last snapshot id for all block devices in an EC2 instance
          # Useful to create AMIs from instance snapshots
          tags:
            - aws
            - ec2
            - snapshot
            - ami
          ansible.builtin.set_fact:
            # Refer https://jinja.palletsprojects.com/en/3.0.x/templates/#assignments
            # for the namespace object's reason
            last_snap_for_device: >-
              {%- set ns = namespace(devices_list = []) -%}
              {%- for result in current_instance_snapshots.results -%}
                {%- for device in current_instance_information.instances[0].block_device_mappings
                    | selectattr('ebs.volume_id', 'equalto', result.volume_id) -%}
                  {{-
                    ns.devices_list.append({
                      'device_name': device.device_name,
                      'snapshot_id': result.snapshots
                        | sort(attribute='start_time') | last
                        | json_query('snapshot_id'),
                    })
                  -}}
                {%- endfor -%}
              {%- endfor -%}
              {{ ns.devices_list }}
|
- name: Flow control
  tags:
    - flow_control
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  ignore_errors: true
  tasks:
    - name: Take pauses
      tags:
        - pause
        - sleep
      ansible.builtin.pause:
        seconds: 1

    - name: Only run in check mode
      tags: check_mode_only
      when: ansible_check_mode is truthy
      ansible.builtin.set_fact:
        # FIX: was the bare name 'ansible_check_mode', which assigns the
        # *literal string*, not the variable's value — template it
        check_mode_active: "{{ ansible_check_mode }}"

    - name: Enforce failures
      tags: failure
      ansible.builtin.fail:
        msg: Manually enforced failure

    - name: Fail task on any non-compliance
      tags: assertion
      vars:
        installation_method: package
        url: https://www.google.com/
      ansible.builtin.assert:
        that:
          - installation_method in ['container', 'package']
          - "'https://www.google.com/' is ansible.builtin.url"
          - "'domain.example.com' is community.general.fqdn_valid(min_labels=2)"
          - url is regex('\w\.com/')
        fail_msg: What to say if any of the above conditions fail
        success_msg: What to say if all of the above conditions succeed

    - name: Execute notified handlers now
      tags: handler
      ansible.builtin.meta: flush_handlers

    - name: Do nothing
      tags: noop
      ansible.builtin.meta: noop

    - name: Retry failing tasks
      tags:
        - failure
        - retry
      changed_when: false
      ansible.builtin.command: /usr/bin/false
      retries: 3
      delay: 1
      register: command_result
      until: command_result is not failed

    - name: Error handling in blocks
      tags: error_handling
      block:
        - name: This executes normally
          ansible.builtin.debug:
            msg: I execute normally
        - name: This errors out
          changed_when: false
          ansible.builtin.command: /bin/false
        - name: This never executes
          ansible.builtin.debug:
            msg: I never execute due to the above task failing
      rescue:
        - name: This executes if any errors arose in the block
          ansible.builtin.debug:
            msg: I caught an error and can do stuff here to fix it
      always:
        - name: This always executes
          ansible.builtin.debug:
            msg: I always execute

    - name: Long-running tasks
      tags: long-running
      vars:
        ansible_async_dir: /tmp/.ansible/async  # defaults to '~/.ansible_async'
      block:
        - name: Long-running task with integrated poll
          tags: async_with_self_poll
          when: ansible_check_mode is falsy  # check mode and async cannot be used on same task
          ansible.builtin.command: /bin/sleep 15
          changed_when: false
          async: 45  # run max 45s
          poll: 5  # check once every 5s

        - name: Long-running task with external poll
          tags: async_with_external_poll
          block:
            - name: Long-running task with external poll
              when: ansible_check_mode is falsy  # check mode and async cannot be used on same task
              ansible.builtin.command: /bin/sleep 15
              changed_when: false
              async: 45  # run max 45s
              poll: 0  # fire and forget
              register: long_running_task_with_external_poll

            - name: Check on long_running_task_with_external_poll
              when: long_running_task_with_external_poll is not skipped
              ansible.builtin.async_status:
                jid: "{{ long_running_task_with_external_poll.ansible_job_id }}"
              register: job_result
              until: job_result.finished
              retries: 9
              delay: 5
- name: Run this play on up to 3 hosts at a time
  hosts: large_group_of_hosts
  serial: 3
  tasks: []
- name: Run this play on a batch of 4 hosts, then a batch of 8, then the rest
  hosts: large_group_of_hosts
  serial:
    - 4
    - 8
    - "100%"  # quoted: percentages must reach Ansible as strings
  strategy: linear
  tasks:
    - name: Limit this task to 3 workers (or up to the current batch if lower)
      throttle: 3
      ansible.builtin.set_fact:
        greetings: hi from {{ ansible_hostname }}
    # - name: Run this task only on one single host
    #   run_once: true
    #   ansible.builtin.set_fact:
    #     confirm: only run on {{ ansible_hostname }}
- name: Debugging
  tags:
    - debug
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  ignore_errors: true
  tasks:
    - name: Output messages
      ansible.builtin.debug:
        msg: I always display!

    - name: Pretty print messages
      vars:
        install_method: package
        supported_install_methods: ['package']
      ansible.builtin.debug:
        msg: >-
          {{
            dict([
              [ 'install_method', install_method ],
              [ 'install_method in supported_install_methods', install_method in supported_install_methods ],
            ])
          }}

    - name: Output variable values
      vars:
        install_method: package
      ansible.builtin.debug:
        var: install_method

    - name: Output messages depending on the verbosity level
      ansible.builtin.debug:
        msg: I only display with 'ansible-playbook -vvv' or with more 'v's
        verbosity: 3

    - name: Start the debugger on failure
      # print all variables at this point => p task_vars
      # continue => c
      # abort and quit => q
      debugger: on_failed
      ansible.builtin.fail:
        msg: Manually enforced failure
- name: Prompt for vars
  tags:
    - prompt
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  vars_prompt:
    # only works at playbook level
    - name: target_db
      prompt: Target DB
      private: false
      default: localhost
    - name: db_password
      # 'private' defaults to true, so the typed password is not echoed
      prompt: DB password
      default: whatever
  tasks: []
- name: Run tasks on different targets than the current one
  # Only *single* target patterns allowed
  # No groups
  tags:
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Run on the controller
      delegate_to: 127.0.0.1
      connection: local
      changed_when: false
      ansible.builtin.command: hostname

    - name: Run on other targets
      delegate_to: copernicus
      changed_when: false
      ansible.builtin.command: hostname
- name: Reuse tasks
  tags:
    - never
  hosts: localhost
  connection: local
  gather_facts: false
  tasks: []
  # - name: Statically use tasks from files
  #   when: false
  #   block:
  #     - name: By using absolute paths and special variables (preferred)
  #       ansible.builtin.import_tasks:
  #         file: "{{ role_path }}/tasks/install/{{ install_method }}.yml"
  #     - name: By using paths relative to the including file
  #       ansible.builtin.import_tasks:
  #         file: pre-flight.yml
  # - name: Conditionally include tasks
  #   block:
  #     - name: by leveraging the 'with_fileglob' loop filter (preferred)
  #       ansible.builtin.include_tasks:
  #         file: "{{ item }}"
  #       with_fileglob: "{{ install_method }}.yml"
  #     - name: by checking the files' existence
  #       vars:
  #         filename: "{{ install_method }}.yml"
  #       when: lookup('ansible.builtin.fileglob', filename) != []
  #       ansible.builtin.import_tasks:
  #         file: "{{ filename }}"

# - name: Reuse playbooks
#   # only works at playbook level
#   vars:
#     var_for_playbook_1: value1
#   ansible.builtin.import_playbook: path/to/playbook.yml

# - name: Apply roles
#   hosts: localhost
#   connection: local
#   gather_facts: false
#   roles:
#     - role_name
#     - path/to/role
#     - role: role_name
#     - role: role_with_vars
#       vars:
#         var1_for_role_with_vars: value
#   tasks:
#     - name: Apply a role now
#       ansible.builtin.import_role:
#         name: role_name
- name: Integrate Ansible Vault
  tags:
    - vault
    - never
  hosts: localhost
  connection: local
  gather_subset:
    - '!all'
    - min
  check_mode: true
  tasks:
    - name: Use encrypted values
      ansible.builtin.set_fact:
        # password: '1q2w3e4r', plaintext value: 'very secret string'
        var_from_encrypted_value: !vault |
          $ANSIBLE_VAULT;1.1;AES256
          34646464653830386631363430386432666530356364313532313336373665613038633464376335
          3539363530613130623638313063363165386230646566640a313438386133366137383939336637
          33333365393337326239336264623462373064383663363234353635316538356461353061646563
          3037306464363439340a663430313739393439363936613862316361353330363638323065383063
          39613935613035343637336537643266313737666635313730353034373736353736

    - name: Use encrypted files
      # The 'unvault' lookup plugin requires files to exist beforehand, but it is fine for them to be plaintext.
      # The 'file' lookup plugin requires files to exist beforehand, but decrypts vault-encrypted files.
      tags: tls_certificate
      ansible.builtin.copy:
        dest: /etc/haproxy/certificate.pem
        content: |
          {{ lookup('ansible.builtin.unvault', 'path/to/cert/key.pem') | string | trim }}
          {{ lookup('ansible.builtin.unvault', 'path/to/cert/full_chain.pem') | string | trim }}
        mode: '0700'

    - name: Save data to encrypted files
      # Mind that the 'vault' filter uses the 'filter_default' vault ID by default to encrypt content.
      # Set that parameter to '' to *not* specify a vault ID.
      vars:
        ansible_vault_password: >-
          {{ lookup('ansible.builtin.file', [playbook_dir, 'ansible_vault_password_file.txt'] | path_join) }}
      ansible.builtin.copy:
        dest: path/to/file
        decrypt: false  # necessary if the file does not exist beforehand
        content: "{{ 'some string' | ansible.builtin.vault(ansible_vault_password, vault_id='') }}"
        mode: '0644'
- name: Use YAML anchors
  hosts: localhost
  connection: local
  vars:
    some_reusable_task: &some_reusable_task
      name: Some reusable task
      tags: some_reusable_task
      check_mode: false
      ansible.builtin.set_fact:
        some_fact: "{{ some_var | default('some value') }}"
    some_reusable_tasks_block: &some_reusable_tasks_block
      name: Some reusable tasks block
      tags: some_reusable_tasks_block
      block:
        - name: Some first reusable task in block
          tags: some_first_reusable_task_in_block
          check_mode: false
          ansible.builtin.set_fact:
            some_first_fact: "{{ some_first_var | default('some first value') }}"
        - name: Some nth reusable task in block
          tags: some_nth_reusable_task_in_block
          check_mode: false
          ansible.builtin.set_fact:
            some_nth_fact: "{{ some_nth_var | default('some nth value') }}"
  tasks:
    - *some_reusable_task
    # merge keys ('<<') are a YAML 1.1 extension: shallow merge only, and
    # keys given explicitly here override the merged-in ones
    - <<: *some_reusable_task
      vars:
        some_var: some overridden value
    - *some_reusable_tasks_block
- name: Common operations
  tags: never
  hosts: localhost
  connection: local
  gather_subset:
    - '!all'
    - min
  check_mode: true
  tasks:  # ordered alphabetically by name
    - name: Add authorized keys
      become: true
      ansible.posix.authorized_key:
        user: ansible
        key: ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAI0123456789abcdefghijkl/ABCDEFGHIJKL01234567 ansible@example.org

    - name: Add repositories
      block:
        - name: To DNF/YUM
          when: ansible_pkg_mgr | lower in ['dnf', 'yum']
          ansible.builtin.yum_repository:
            name: epel
            description: EPEL YUM repo
            baseurl: https://download.fedoraproject.org/pub/epel/$releasever/$basearch/

    - name: Add users to the sudoers
      become: true
      community.general.sudoers:
        name: ansible
        user: ansible
        nopassword: true
        commands: ALL

    - name: Create directories recursively
      ansible.builtin.file:
        path: /tmp/path/to/final/dir
        state: directory
        mode: '0775'

    - name: Create users
      become: true
      ansible.builtin.user:
        name: ansible

    - name: Define files content in tasks
      ansible.builtin.copy:
        dest: "{{ ansible_user_dir }}/.tmux.conf"
        mode: u=rw,go=r
        content: |
          …

    - name: Generate passwords
      ansible.builtin.set_fact:
        random_password: "{{ lookup('ansible.builtin.password', '/dev/null') }}"
        random_password_with_requirements: >-
          {{ lookup('ansible.builtin.password', '/dev/null length=32 chars=ascii_letters,digits,punctuation') }}
        random_but_idempotent_password: >-
          {{ lookup('ansible.builtin.password', '/dev/null', seed=inventory_hostname, length=16) }}

    - name: Look for files
      ansible.builtin.set_fact:
        path_list_of_all_txt_files_in_dir: "{{ lookup('ansible.builtin.fileglob', '/my/path/*.txt') }}"

    - name: Install packages
      block:
        - name: Via package manager on any supported system
          ansible.builtin.package:
            name:
              - tmux
              - screen
        - name: Via PIP
          ansible.builtin.pip:
            name:
              - bottle
              - django>1.11.0,<1.12.0
              - svn+http://myrepo/svn/MyApp#egg=MyApp
              - git+http://myrepo/app/MyApp
              - file:///path/to/MyApp.tar.gz

    - name: Gather facts about hosts previously not in inventory
      delegate_to: host_previously_not_in_inventory
      ansible.builtin.setup:
        filter:
          - '!all'
          - min

    - name: Unarchive files
      tags: unarchive
      when:
        - archive_file.stat.mimetype in ["application/x-tar"]
        - archive_file.stat.path is regex('\\.tar')
      block:
        - name: Unarchive file '{{ archive_file.stat.path | basename }}'
          ansible.builtin.unarchive:
            remote_src: true
            src: "{{ archive_file.stat.path }}"
            dest: "{{ archive_file.stat.path | dirname }}"
        - name: Get the name of extracted directories ending in '.dir'
          tags: find
          ansible.builtin.find:
            paths: "{{ archive_file.stat.path | dirname }}"
            recurse: false
            file_type: directory
            patterns: '*.dir'
          register: extracted_dirs
        - name: Save the first extracted directory found
          ansible.builtin.stat:
            path: "{{ extracted_dirs.files[0].path }}"
          register: first_extracted_dir

    - name: Run containers
      block:
        - name: Directly
          community.docker.docker_container:
            name: gitlab
            image: gitlab/gitlab-ce:16.11.2-ce.0
            hostname: gitlab.lan
            published_ports:
              - "8022:22"
              - "8080:80"
              - "8443:443"
            env:
              GITLAB_OMNIBUS_CONFIG: >-
                external_url 'http://gitlab.lan';
            shm_size: 256m
            volumes:
              - ./config:/etc/gitlab:Z
              - ./logs:/var/log/gitlab:Z
              - ./data:/var/opt/gitlab:Z
            auto_remove: true
        - name: With Compose
          community.docker.docker_compose_v2:
            project_src: /home/user/flask

    - name: Send messages to Slack channels
      vars:
        slack_notification_hook_url: https://hooks.slack.com/services/AB01CD23EF4/ABCD0123E/aBcDefGh0123456789iJKLmn
      block:
        - name: Send plain messages
          ansible.builtin.uri:
            url: "{{ slack_notification_hook_url }}"
            method: POST
            body_format: json
            body:
              text: (╥╯ᗝ╰╥) task XYZ failed
        - name: Send mrkdwn (Slack-specific markdown) text
          # FIXME: still to be tested
          ansible.builtin.uri:
            url: "{{ slack_notification_hook_url }}"
            method: POST
            body_format: json
            body:
              blocks:
                - type: section
                  text:
                    type: mrkdwn
                    text: This is a *_fancy_* message

    - name: Setup cronjobs
      block:
        - name: At specific times
          # Mind this is based on the *hosts'* time.
          become: true
          ansible.builtin.cron:
            name: Prometheus manual data backup
            cron_file: prometheus-manual-data-backup
            hour: '4'    # quoted: the cron module expects string fields
            minute: '0'
            user: root
            # - Keep '%' characters escaped or they'll be treated as newlines.
            # - Archive creation returns 1 if it detects changes to read files.
            #   Using ';' instead of '&&' to ignore.
            job: >
              FILENAME="/tmp/prometheus-data-$(date +'\%s-\%F-\%H-\%m-\%S').tar.gz"
              && tar -czf "$FILENAME" '/var/lib/prometheus/data'
              ; tar -tf "$FILENAME" > '/dev/null'
              && aws s3 cp "$FILENAME" 's3://backups/prometheus/'
              && rm "$FILENAME"

    - name: Use the users' home directory for something
      block:
        - name: Executing commands from specified users
          block:
            - name: Get users' homedir back
              become: true
              become_user: "{{ item }}"
              become_flags: '-iH'  # quoted: plain scalars starting with '-' are fragile
              check_mode: false
              ansible.builtin.command: >-
                echo "{{ item }}: $HOME"
              changed_when: false
              with_items:
                - root
                - ec2-user
              register: users_homedir_retrieve
            - name: Compute and register the results
              tags: AnsibleUnsafeText_to_Dict
              ansible.builtin.set_fact:
                users_homedir: >-
                  {{
                    users_homedir_retrieve
                    | community.general.json_query('results[].stdout')
                    | map('from_yaml')
                    | combine
                  }}
            - name: Do your thing!
              become: true
              become_user: "{{ item.key }}"
              ansible.builtin.file:
                path: "{{ item.value }}/placeholder"
                state: touch
                mode: '0755'
              with_dict: "{{ users_homedir }}"
        - name: From the system's entries
          block:
            - name: Get raw information from the system's entries
              ansible.builtin.getent:
                database: passwd
                key: "{{ item }}"
                split: ':'
              with_items:
                - root
                - ec2-user
              register: users_entries
            - name: Compute and register the results
              ansible.builtin.set_fact:
                users_info: >-
                  {{
                    users_entries
                    | community.general.json_query('results[].ansible_facts.getent_passwd[]')
                    | combine
                  }}
            - name: Do your thing!
              ansible.builtin.file:
                # field 4 of a passwd entry split on ':' is the home directory
                path: "{{ item.value[4] }}/placeholder"
                owner: "{{ item.key }}"
                state: touch
                mode: '0755'
              with_dict: "{{ users_info }}"
- name: AWS-specific operations
|
|
tags: never
|
|
hosts: localhost
|
|
connection: local
|
|
gather_facts: false
|
|
check_mode: true
|
|
tasks:
|
|
- name: Apply roles on different targets than the current one
|
|
block: []
|
|
# - name: Gather facts about the target EC2 instance
|
|
# when: instance_information.instance_ids | length > 0
|
|
# delegate_to: "{{ instance_information.instance_ids | first }}"
|
|
# vars:
|
|
# ansible_connection: aws_ssm
|
|
# ansible_python_interpreter: /usr/bin/python3
|
|
# ansible_remote_tmp: /tmp/.ansible-ssm-user/tmp
|
|
# ansible.builtin.gather_facts: {}
|
|
# register: fact_gathering
|
|
# - name: Apply the role to the EC2 instance
|
|
# when: fact_gathering is not skipped
|
|
# delegate_to: "{{ instance_information.instance_ids | first }}"
|
|
# delegate_facts: true
|
|
# vars:
|
|
# ansible_connection: aws_ssm
|
|
# ansible_aws_ssm_timeout: 900
|
|
# ansible_python_interpreter: /usr/bin/python3
|
|
# ansible_remote_tmp: /tmp/.ansible-ssm-user/tmp
|
|
# ansible_async_dir: /tmp/.ansible-ssm-user/async
|
|
# some_role_var: some value
|
|
# some_other_role_var: some value
|
|
# ansible.builtin.import_role:
|
|
# name: role name
|
|
- name: Get the list of current public IP ranges
  # too many to be put into security group rules
  ansible.builtin.set_fact:
    # FIX: the pipeline was not wrapped in '{{ … }}', so 'ip_ranges' would be
    # set to the literal pipeline text instead of the evaluated result
    ip_ranges: >-
      {{
        lookup('url', 'https://ip-ranges.amazonaws.com/ip-ranges.json', split_lines=False)
        | from_json
        | json_query('prefixes')
        | selectattr('region', 'equalto', 'eu-west-1')
        | selectattr('service', 'equalto', 'AMAZON')
        | map(attribute='ip_prefix')
      }}
- name: Assume roles
|
|
block:
|
|
- name: Get session tokens
|
|
amazon.aws.sts_assume_role:
|
|
access_key: AKIA1EXAMPLE1EXAMPLE # optional if defined as environment variable
|
|
secret_key: 123456789abcdefghijklmnopqrstuvwxyzABCDE # optional if defined as environment variable
|
|
profile: someProfile # optional if defined as environment variable
|
|
role_arn: "arn:aws:iam::123456789012:role/someRole"
|
|
role_session_name: someRoleSession
|
|
duration_seconds: "{{ (60 * 60 * 1) | int }}" # min 900s, must be int
|
|
register: assumed_role
|
|
- name: Use the assumed role to take action
|
|
amazon.aws.ec2_tag:
|
|
access_key: "{{ assumed_role.sts_creds.access_key }}"
|
|
secret_key: "{{ assumed_role.sts_creds.secret_key }}"
|
|
profile: null # required to use the assumed role's token, if profile is specified via environment variable
|
|
session_token: "{{ assumed_role.sts_creds.session_token }}"
|
|
resource: i-xyzxyz01
|
|
tags:
|
|
MyNewTag: value
|
|
- name: Update the play's inventory with a newly started instance
|
|
tags:
|
|
- update_inventory
|
|
- new_instance
|
|
when: new_instance.instance_id is defined
|
|
block:
|
|
- name: Add the new instance to the play's inventory
|
|
tags: add_instance_to_inventory
|
|
ansible.builtin.add_host:
|
|
name: "{{ new_instance.instance_id }}"
|
|
ansible_python_interpreter: /usr/bin/python3
|
|
ansible_connection: community.aws.aws_ssm
|
|
ansible_aws_ssm_bucket_name: company-ssm-logs
|
|
ansible_aws_ssm_region: eu-west-1
|
|
ansible_remote_tmp: /home/ssm-user/.cache/ansible/tmp
|
|
ansible_async_dir: /home/ssm-user/.cache/ansible/async
|
|
- name: Gather facts from the instance
|
|
tags: gather_facts
|
|
delegate_to: "{{ new_instance.instance_id }}"
|
|
delegate_facts: true
|
|
ansible.builtin.gather_facts: # alternatively, use 'ansible.builtin.setup' to allow for subsets gathering
|
|
- name: DEBUG Print the new instance's host variables
|
|
delegate_to: "{{ new_instance.instance_id }}"
|
|
ansible.builtin.debug:
|
|
verbosity: 3
|
|
var: hostvars[new_instance.instance_id]
|
|
- name: Wait for AWS to realize some requests have been made
|
|
ansible.builtin.pause:
|
|
seconds: 60
|
|
- name: EC2-specific operations
  block:
    - name: Get running instances with 'K8S' as the 'Application' tag
      amazon.aws.ec2_instance_info:
        filters:
          # quote keys containing ':' to keep them unambiguous plain strings
          "tag:Application": K8S
          instance-state-name: ["running"]
    - name: Clone EC2 instances
      vars:
        source_instance_id: i-0123456789abcdef0
      block:
        - name: Get instance information from the original instance
          amazon.aws.ec2_instance_info:
            instance_ids:
              - "{{ source_instance_id }}"
          register: source_instance_info
        - name: Create an AMI of the original instance
          amazon.aws.ec2_ami:
            instance_id: "{{ source_instance_id }}"
            no_reboot: true # remove if the instance rebooting upon AMI creation is no biggie
            wait: true
            wait_timeout: 3600 # big volumes call for big wait times
            name: ami-source
          register: source_ami
        - name: Use the AMI to launch clones identical to the original
          when: source_ami.image_id is defined
          amazon.aws.ec2_instance:
            name: clone
            vpc_subnet_id: "{{ source_instance_info.instances[0].subnet_id }}"
            instance_type: "{{ source_instance_info.instances[0].instance_type }}"
            image:
              id: "{{ source_ami.image_id }}"
- name: Long-running tasks via SSM
  vars:
    ansible_connection: community.aws.aws_ssm
    ansible_remote_tmp: /tmp/.ansible-ssm-user/tmp
    ansible_async_dir: /tmp/.ansible-ssm-user/async
  block:
    - name: Dump a DB from an RDS instance to a temporary file
      when:
        - ansible_check_mode is falsy # check mode and async cannot be used on same task
        - rds_instance.endpoint is defined
      community.postgresql.postgresql_db:
        login_host: "{{ rds_instance.endpoint.address }}"
        login_port: "{{ rds_instance.endpoint.port }}"
        login_user: "{{ rds_instance.master_username }}"
        login_password: "{{ db_password }}"
        name: sales
        state: dump
        # The whole expression must be inside '{{ … }}' for 'path_join' to be
        # evaluated; a bare '[…] | path_join' would be passed through as literal text.
        target: >-
          {{
            [
              ansible_user_dir,
              'dump.' ~ db_instance_identifier ~ '.' ~ ansible_date_time.iso8601_basic_short ~ '.dir'
            ] | path_join
          }}
        target_opts: >-
          --exclude-table …
          --exclude-schema archived
          --no-publications
          --format d --jobs $(nproc)
      async: "{{ 60 * 60 * 2 }}" # wait up to 2 hours -- 60s * 60m * 2h
      poll: 0 # fire and forget; ssm would not allow self-checking anyways
      register: dump
      changed_when:
        - dump.rc == 0
        - dump.module_stderr == ''
        - "'started' | extract(dump.module_stdout | regex_search('{.*}') | from_json) == 1"
        - "'failed' | extract(dump.module_stdout | regex_search('{.*}') | from_json) == 0"
      failed_when: dump.rc != 0
    - name: Check on the dump task
      vars:
        max_wait: "{{ (60 * 60 * 12) }}" # wait for the async task to end
        ansible_aws_ssm_timeout: "{{ max_wait }}" # ssm uses a single connection, keep active until the end
        dump_stdout_as_obj: "{{ dump.module_stdout | regex_search('{.*}') | from_json }}"
        ansible_job_id: "{{ dump_stdout_as_obj.ansible_job_id }}"
      ansible.builtin.async_status:
        jid: "{{ ansible_job_id }}"
      register: dump_result
      until: dump_result.finished
      # total wait time is retries * delay; divide 'max_wait' by the delay so the
      # task actually gives up after 'max_wait' seconds instead of 'max_wait' attempts
      retries: "{{ ((max_wait | int) / 300) | round(0, 'ceil') | int }}"
      delay: 300 # check once every 5m to avoid overloading the ssm agent
- name: RDS-specific operations
  block:
    - name: Create an instance's snapshot
      block:
        - name: Create the snapshot
          amazon.aws.rds_instance_snapshot:
            db_instance_identifier: identifier-for-db-instance
            db_snapshot_identifier: identifier-for-db-snapshot
          register: snapshot_creation
        - name: Wait for the snapshot to be in the 'available' state
          when: snapshot_creation.snapshot_create_time is defined
          amazon.aws.rds_snapshot_info:
            db_snapshot_identifier: "{{ snapshot_creation.db_snapshot_identifier }}"
          register: snapshot_check
          retries: 3
          delay: 120
          # 'selectattr' returns a generator, which the 'length' filter cannot
          # measure directly; force it to a list before counting
          until: snapshot_check.snapshots | selectattr('status', 'equalto', 'available') | list | length > 0
    - name: Restore instance with automatic backup enabled to point in time
      block:
        - name: Restore DB instance
          amazon.aws.rds_instance:
            db_instance_identifier: restored-to-pitr
            creation_source: instance
            source_db_instance_identifier: source-instance
            use_latest_restorable_time: true
            # tags: # avoid setting up, it errors out when restoring to pitr
            # avoid waiting for db instances with automatic backup enabled to finish backing up the restored
            # instance right after creation since db instances' first backup takes unbearably long (3h for 100GB)
            wait: false
          register: pitr_restored_instance
        - name: Wait for the restored DB instance to be created
          when:
            - clone_db_instance.backup_retention_period is defined
            - clone_db_instance.backup_retention_period != 0
          block:
            - name: Wait for the restored DB instance to be created
              amazon.aws.rds_instance_info:
                db_instance_identifier: "{{ pitr_restored_instance.db_instance_identifier }}"
              register: pitr_restored_instance_status_check
              retries: 15
              delay: 60
              until: pitr_restored_instance_status_check.instances[0].db_instance_status != 'creating'
            - name: Update restored DB instance information
              # 'amazon.aws.rds_instance' will *not* have the 'endpoint' key defined if not waiting
              ansible.builtin.set_fact:
                pitr_restored_instance: "{{ pitr_restored_instance_status_check.instances[0] }}"
    - name: Dump roles' privileges
      block:
        - name: Dump to file
          environment:
            PGPASSWORD: someRandomString
          vars:
            out_file: /tmp/instance-id_roles.sql
          ansible.builtin.command: >-
            pg_dumpall
            --host 'instance-id.0123456789ab.eu-west-1.rds.amazonaws.com' --port '5432'
            --user 'postgres' --database 'postgres' --no-password
            --roles-only --no-role-passwords
            --file '{{ out_file }}'
          changed_when: false
        - name: Dump to variable for later use through 'dump_execution.stdout_lines'
          environment:
            PGPASSWORD: someRandomString
          ansible.builtin.command: >-
            pg_dumpall
            -h 'instance-id.0123456789ab.eu-west-1.rds.amazonaws.com' -p '5432'
            -U 'postgres' -l 'postgres' -w
            -r --no-role-passwords
          changed_when: false
          register: dump_execution
    - name: Wait for pending changes to be applied
      amazon.aws.rds_instance_info:
        db_instance_identifier: identifier-for-db-instance
      register: instance_check
      retries: 12
      delay: 15
      until: instance_check.instances[0].pending_modified_values.keys() | length == 0
- name: S3-specific operations
  block:
    - name: Check S3 object exists
      amazon.aws.s3_object_info:
        bucket_name: my-bucket
        object_name: prefix/object.tar
    - name: Download objects from S3
      # The 'amazon.aws.s3_object' module might be *not* suitable for files bigger than the executor's currently
      # available memory. See <https://github.com/ansible-collections/amazon.aws/issues/2395>.
      # TL:DR: at the time of writing, the module keeps downloaded data in memory before flushing it to disk,
      # filling up the host's memory when downloading big files and causing it to stall or crash.
      amazon.aws.s3_object:
        bucket: my-bucket
        object: prefix/object.tar
        dest: /tmp/object.tar
        mode: get
    - name: Upload objects to S3
      amazon.aws.s3_object:
        bucket: my-bucket
        object: prefix/object.tar
        src: /tmp/object.tar
        mode: put
- name: "AWS: Start stopped instances and add the first of them to the inventory for the next play"
  # works at playbook level
  tags:
    - never
  hosts: localhost
  connection: local
  check_mode: false
  handlers:
    - name: Add the first started host
      ansible.builtin.add_host:
        groups:
          - tag_Application_Postgres
          - tag_Component_Dumper
        name: "{{ started_instances.instance_ids[0] }}"
    - name: Add all started hosts with host variables
      loop: "{{ started_instances.instance_ids }}"
      ansible.builtin.add_host:
        groups:
          - tag_Application_Postgres
          - tag_Component_Dumper
        name: "{{ item }}"
        # FQCN for consistency with the other 'aws_ssm' connection settings in this file
        ansible_connection: community.aws.aws_ssm
        ansible_aws_ssm_bucket_name: company-ssm-logs
        ansible_aws_ssm_region: eu-west-1
        ansible_aws_ssm_timeout: 900
        ansible_remote_tmp: /tmp/.ansible-ssm-user/tmp
        ansible_async_dir: /tmp/.ansible-ssm-user/async
  tasks:
    - name: Start the PG dumper instance
      tags: dumper
      amazon.aws.ec2_instance:
        filters:
          # quote keys containing ':' to keep them unambiguous plain strings
          "tag:Application": Postgres
          "tag:Component": Dumper
        state: started
      register: started_instances
      notify:
        - Add the first started host
        - Add all started hosts with host variables
    # follow up with play using hosts 'tag_Application_Postgres,&tag_Component_Dumper'
|
- name: AWX-specific operations
  tags: never
  hosts: localhost
  connection: local
  gather_facts: false
  check_mode: true
  environment:
    CONTROLLER_HOST: https://awx.example.org/
    # environment values must be strings; an unquoted 'false' is a YAML boolean
    CONTROLLER_VERIFY_SSL: "false"
    CONTROLLER_USERNAME: admin
    CONTROLLER_PASSWORD: somethingSecret
  tasks:
    - name: Export all data from existing instances
      # At the time of writing: applications, credential_types, credentials, execution_environments, inventory,
      # inventory_sources, job_templates, notification_templates, organizations, projects, schedules, teams, and users.
      awx.awx.export:
        all: true
      register: awx_export_output
- name: GitLab-specific operations
  tags: never
  hosts: localhost
  connection: local
  gather_facts: false
  check_mode: true
  tasks:
    - name: Install configured fleeting plugins
      when: runner_executor in ["docker-autoscaler", "instance"]
      become: true
      ansible.builtin.command:
        chdir: /root
        cmd: gitlab-runner fleeting install
        creates: /root/.config/fleeting/plugins
- name: Let's Encrypt-specific operations
|
|
tags: never
|
|
hosts: localhost
|
|
connection: local
|
|
gather_facts: false
|
|
check_mode: true
|
|
tasks:
|
|
# The 'acme_certificate' module takes in file paths for the certificate's files; those need either to *not* exist
|
|
# beforehand, or their content to be in specific formats.
|
|
- name: Revoke test certificates with account key
|
|
community.crypto.acme_certificate_revoke:
|
|
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
|
|
acme_version: 2
|
|
account_key_src: path/to/acme_account.key.pem
|
|
certificate: path/to/certificate.crt.pem
|