Fix no-changed-when ansible-lint errors
Reference: https://ansible-lint.readthedocs.io/en/latest/default_rules/#no-changed-when
This commit is contained in:
parent 0ab2001ce7
commit 1693c4ca1d
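The fix applies one pattern to every ansible.builtin.command task touched by the diff: the command string moves under an explicit cmd: key, its result is registered, and changed_when states when the task should report a change, which is what ansible-lint's no-changed-when rule asks for. A minimal sketch of that pattern (the task name, paths and variable name are illustrative, not taken from the playbook):

# Before: ansible-lint flags this, since Ansible cannot tell whether the command changed anything.
- name: Move example data file
  ansible.builtin.command: "mv /tmp/example.db /tmp/example.db.backup"

# After: the result is registered and changed_when is declared explicitly.
- name: Move example data file
  ansible.builtin.command:
    cmd: "mv /tmp/example.db /tmp/example.db.backup"
  register: example_move_result
  changed_when: example_move_result.rc == 0

Where a command is purely a check (the hookshot config validation below), the diff sets changed_when: false instead, so the task never reports a change.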
@@ -52,6 +52,8 @@
       {{ matrix_appservice_irc_docker_image }}
       -c
       '/usr/local/bin/node /app/lib/scripts/migrate-db-to-pgres.js --dbdir /data --privateKey /data/passkey.pem --connectionString {{ matrix_appservice_irc_database_connection_string }}'
+  register: matrix_appservice_irc_import_nedb_to_postgres_result
+  changed_when: matrix_appservice_irc_import_nedb_to_postgres_result.rc == 0

 - name: Archive NeDB database files
   ansible.builtin.command:
@@ -59,6 +61,8 @@
   with_items:
     - rooms.db
     - users.db
+  register: matrix_appservice_irc_import_nedb_to_postgres_move_result
+  changed_when: matrix_appservice_irc_import_nedb_to_postgres_move_result.rc == 0

 - name: Inject result
   ansible.builtin.set_fact:
@@ -30,10 +30,16 @@
   failed_when: false

 - name: (Data relocation) Move AppService IRC passkey.pem file to ./data directory
-  ansible.builtin.command: "mv {{ matrix_appservice_irc_base_path }}/passkey.pem {{ matrix_appservice_irc_data_path }}/passkey.pem"
+  ansible.builtin.command:
+    cmd: "mv {{ matrix_appservice_irc_base_path }}/passkey.pem {{ matrix_appservice_irc_data_path }}/passkey.pem"
+  register: matrix_appservice_irc_move_passkey_result
+  changed_when: matrix_appservice_irc_move_passkey_result.rc == 0

 - name: (Data relocation) Move AppService IRC database files to ./data directory
-  ansible.builtin.command: "mv {{ matrix_appservice_irc_base_path }}/{{ item }} {{ matrix_appservice_irc_data_path }}/{{ item }}"
+  ansible.builtin.command:
+    cmd: "mv {{ matrix_appservice_irc_base_path }}/{{ item }} {{ matrix_appservice_irc_data_path }}/{{ item }}"
+  register: matrix_appservice_irc_move_dbs_result
+  changed_when: matrix_appservice_irc_move_dbs_result.rc == 0
   with_items:
     - rooms.db
     - users.db
@@ -46,10 +46,14 @@
       {{ matrix_appservice_slack_docker_image }}
       -c
       '/usr/local/bin/node /usr/src/app/lib/scripts/migrateToPostgres.js --dbdir /data --connectionString {{ matrix_appservice_slack_database_connection_string }}'
+  register: matrix_appservice_slack_import_nedb_to_postgres_result
+  changed_when: matrix_appservice_slack_import_nedb_to_postgres_result.rc == 0

 - name: Archive NeDB database files
   ansible.builtin.command:
     cmd: "mv {{ matrix_appservice_slack_data_path }}/{{ item }} {{ matrix_appservice_slack_data_path }}/{{ item }}.backup"
+  register: matrix_appservice_slack_import_nedb_to_postgres_move_result
+  changed_when: matrix_appservice_slack_import_nedb_to_postgres_move_result.rc == 0
   with_items:
     - teams.db
     - room-store.db
@@ -48,14 +48,17 @@
 # Building the container image (using the default Dockerfile) requires that a docker-requirements.txt file be generated.
 # See: https://gitlab.com/beeper/linkedin/-/blob/94442db17ccb9769b377cdb8e4bf1cb3955781d7/.gitlab-ci.yml#L30-40
 - name: Ensure docker-requirements.txt is generated before building Beeper LinkedIn Docker Image
-  ansible.builtin.command: |
-    {{ matrix_host_command_docker }} run \
-    --rm \
-    --entrypoint=/bin/sh \
-    --mount type=bind,src={{ matrix_beeper_linkedin_docker_src_files_path }},dst=/work \
-    -w /work \
-    docker.io/python:3.9.6-buster \
-    -c "pip install poetry && poetry export --without-hashes -E e2be -E images -E metrics | sed 's/==.*//g' > docker-requirements.txt"
+  ansible.builtin.command:
+    cmd: |
+      {{ matrix_host_command_docker }} run \
+      --rm \
+      --entrypoint=/bin/sh \
+      --mount type=bind,src={{ matrix_beeper_linkedin_docker_src_files_path }},dst=/work \
+      -w /work \
+      docker.io/python:3.9.6-buster \
+      -c "pip install poetry && poetry export --without-hashes -E e2be -E images -E metrics | sed 's/==.*//g' > docker-requirements.txt"
+  register: matrix_beeper_linkedin_generate_docker_requirements_result
+  changed_when: matrix_beeper_linkedin_generate_docker_requirements_result.rc == 0

 - name: Ensure Beeper LinkedIn Docker image is built
   docker_image:
@@ -69,15 +69,17 @@
     group: "{{ matrix_user_groupname }}"

 - name: Validate hookshot config.yml
-  ansible.builtin.command: |
-    {{ matrix_host_command_docker }} run
-    --rm
-    --name={{ matrix_hookshot_container_url }}-validate
-    --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
-    --cap-drop=ALL
-    -v {{ matrix_hookshot_base_path }}/config.yml:/config.yml
-    {{ matrix_hookshot_docker_image }} node Config/Config.js /config.yml
+  ansible.builtin.command:
+    cmd: |
+      {{ matrix_host_command_docker }} run
+      --rm
+      --name={{ matrix_hookshot_container_url }}-validate
+      --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
+      --cap-drop=ALL
+      -v {{ matrix_hookshot_base_path }}/config.yml:/config.yml
+      {{ matrix_hookshot_docker_image }} node Config/Config.js /config.yml
   register: hookshot_config_validation_result
+  changed_when: false

 - name: Fail if hookshot config.yml invalid
   ansible.builtin.fail:
@@ -36,7 +36,10 @@
   failed_when: false

 - name: (Data relocation) Move mx-puppet-discord database file to ./data directory
-  ansible.builtin.command: "mv {{ matrix_mx_puppet_discord_base_path }}/database.db {{ matrix_mx_puppet_discord_data_path }}/database.db"
+  ansible.builtin.command:
+    cmd: "mv {{ matrix_mx_puppet_discord_base_path }}/database.db {{ matrix_mx_puppet_discord_data_path }}/database.db"
+  register: matrix_mx_puppet_discord_relocate_database_result
+  changed_when: matrix_mx_puppet_discord_relocate_database_result.rc == 0
   when: "matrix_mx_puppet_discord_stat_database.stat.exists"

 - ansible.builtin.set_fact:
@@ -1,4 +1,7 @@
 ---

 - name: Run Docker System Prune
-  ansible.builtin.command: "{{ matrix_host_command_docker }} system prune -a -f"
+  ansible.builtin.command:
+    cmd: "{{ matrix_host_command_docker }} system prune -a -f"
+  register: matrix_common_after_docker_prune_result
+  changed_when: matrix_common_after_docker_prune_result.rc == 0
@@ -19,7 +19,10 @@
 - name: Wait a while, so that Dendrite can manage to start
   ansible.builtin.pause:
     seconds: 7
-  when: "start_result.changed"
+  when: start_result.changed | bool

 - name: Register user
-  ansible.builtin.command: "{{ matrix_local_bin_path }}/matrix-dendrite-create-account {{ username|quote }} {{ password|quote }}"
+  ansible.builtin.command:
+    cmd: "{{ matrix_local_bin_path }}/matrix-dendrite-create-account {{ username|quote }} {{ password|quote }}"
+  register: matrix_dendrite_register_user_result
+  changed_when: matrix_dendrite_register_user_result.rc == 0
@@ -44,7 +44,10 @@
 # recursively copy remote directories (like `/matrix/mxisd/data/sign.key`) in older versions of Ansible.
 - block:
     - name: Copy mxisd data files to ma1sd folder
-      ansible.builtin.command: "cp -ar {{ matrix_base_data_path }}/mxisd/data {{ matrix_ma1sd_base_path }}"
+      ansible.builtin.command:
+        cmd: "cp -ar {{ matrix_base_data_path }}/mxisd/data {{ matrix_ma1sd_base_path }}"
+      register: matrix_ma1sd_migrate_mxisd_data_files_copying_result
+      changed_when: matrix_ma1sd_migrate_mxisd_data_files_copying_result.rc == 0

     - name: Check existence of mxisd.db file
       ansible.builtin.stat:
@@ -52,11 +55,17 @@
       register: matrix_ma1sd_mxisd_db_stat

     - name: Rename database (mxisd.db -> ma1sd.db)
-      ansible.builtin.command: "mv {{ matrix_ma1sd_data_path }}/mxisd.db {{ matrix_ma1sd_data_path }}/ma1sd.db"
+      ansible.builtin.command:
+        cmd: "mv {{ matrix_ma1sd_data_path }}/mxisd.db {{ matrix_ma1sd_data_path }}/ma1sd.db"
+      register: matrix_ma1sd_migrate_mxisd_move_db_result
+      changed_when: matrix_ma1sd_migrate_mxisd_move_db_result.rc == 0
       when: "matrix_ma1sd_mxisd_db_stat.stat.exists"

     - name: Rename mxisd folder
-      ansible.builtin.command: "mv {{ matrix_base_data_path }}/mxisd {{ matrix_base_data_path }}/mxisd.migrated"
+      ansible.builtin.command:
+        cmd: "mv {{ matrix_base_data_path }}/mxisd {{ matrix_base_data_path }}/mxisd.migrated"
+      register: matrix_ma1sd_migrate_mxisd_move_directory_result
+      changed_when: matrix_ma1sd_migrate_mxisd_move_directory_result.rc == 0
       when: "ma1sd_migrate_mxisd_data_dir_stat.stat.exists"

 - name: Ensure outdated matrix-mxisd.service doesn't exist
@@ -80,10 +80,14 @@
       {{ matrix_postgres_pgloader_docker_image }}
       -c
       'pgloader /in.db {{ postgres_db_connection_string }}'
+  register: matrix_postgres_import_generic_sqlite_db_import_result
+  changed_when: matrix_postgres_import_generic_sqlite_db_import_result.rc == 0

 - name: Archive SQLite database ({{ sqlite_database_path }} -> {{ sqlite_database_path }}.backup)
   ansible.builtin.command:
     cmd: "mv {{ sqlite_database_path }} {{ sqlite_database_path }}.backup"
+  register: matrix_postgres_import_generic_sqlite_db_move_result
+  changed_when: matrix_postgres_import_generic_sqlite_db_move_result.rc == 0

 - name: Inject result
   ansible.builtin.set_fact:
@@ -101,6 +101,9 @@
       and manually run the above import command directly on the server.

 - name: Perform Postgres database import
-  ansible.builtin.command: "{{ matrix_postgres_import_command }}"
+  ansible.builtin.command:
+    cmd: "{{ matrix_postgres_import_command }}"
   async: "{{ postgres_import_wait_time }}"
   poll: 10
+  register: matrix_postgres_import_postgres_command_result
+  changed_when: matrix_postgres_import_postgres_command_result.rc == 0
@@ -70,17 +70,20 @@
 # Also, some old `docker_container` versions were buggy and would leave containers behind
 # on failure, which we had to work around to allow retries (by re-running the playbook).
 - name: Import SQLite database into Postgres
-  ansible.builtin.command: |
-    docker run
-    --rm
-    --name=matrix-synapse-migrate
-    --log-driver=none
-    --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
-    --cap-drop=ALL
-    --network={{ matrix_docker_network }}
-    --entrypoint=python
-    --mount type=bind,src={{ matrix_synapse_config_dir_path }},dst=/data
-    --mount type=bind,src={{ matrix_synapse_config_dir_path }},dst=/matrix-media-store-parent/media-store
-    --mount type=bind,src={{ server_path_homeserver_db }},dst=/{{ server_path_homeserver_db | basename }}
-    {{ matrix_synapse_docker_image }}
-    /usr/local/bin/synapse_port_db --sqlite-database /{{ server_path_homeserver_db | basename }} --postgres-config /data/homeserver.yaml
+  ansible.builtin.command:
+    cmd: |
+      docker run
+      --rm
+      --name=matrix-synapse-migrate
+      --log-driver=none
+      --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
+      --cap-drop=ALL
+      --network={{ matrix_docker_network }}
+      --entrypoint=python
+      --mount type=bind,src={{ matrix_synapse_config_dir_path }},dst=/data
+      --mount type=bind,src={{ matrix_synapse_config_dir_path }},dst=/matrix-media-store-parent/media-store
+      --mount type=bind,src={{ server_path_homeserver_db }},dst=/{{ server_path_homeserver_db | basename }}
+      {{ matrix_synapse_docker_image }}
+      /usr/local/bin/synapse_port_db --sqlite-database /{{ server_path_homeserver_db | basename }} --postgres-config /data/homeserver.yaml
+  register: matrix_postgres_import_synapse_sqlite_db_result
+  changed_when: matrix_postgres_import_synapse_sqlite_db_result.rc == 0
@ -51,8 +51,11 @@
|
||||
|
||||
- block:
|
||||
- name: Relocate Postgres data files from old directory to new
|
||||
ansible.builtin.command: "mv {{ item.path }} {{ matrix_postgres_data_path }}/{{ item.path | basename }}"
|
||||
ansible.builtin.command:
|
||||
cmd: "mv {{ item.path }} {{ matrix_postgres_data_path }}/{{ item.path | basename }}"
|
||||
with_items: "{{ result_pg_old_data_dir_find.files }}"
|
||||
register: matrix_postgres_migrate_postgres_data_directory_move_result
|
||||
changed_when: matrix_postgres_migrate_postgres_data_directory_move_result.rc == 0
|
||||
when: "result_pg_old_data_dir_stat.stat.exists"
|
||||
|
||||
# Intentionally not starting matrix-postgres here.
|
||||
|
@@ -65,7 +65,7 @@
   ansible.builtin.service_facts:

 - ansible.builtin.set_fact:
-    matrix_postgres_synapse_was_running: "{{ ansible_facts.services['matrix-synapse.service']|default(none) is not none and ansible_facts.services['matrix-synapse.service'].state == 'running' }}"
+    matrix_postgres_synapse_was_running: "{{ ansible_facts.services['matrix-synapse.service'] | default(none) is not none and ansible_facts.services['matrix-synapse.service'].state == 'running' }}"

 - name: Ensure matrix-synapse is stopped
   ansible.builtin.service:
@@ -78,6 +78,7 @@
   async: "{{ postgres_vacuum_wait_time }}"
   poll: 10
   register: matrix_postgres_synapse_vacuum_result
+  changed_when: matrix_postgres_synapse_vacuum_result.rc == 0

 # Intentionally show the results
 - ansible.builtin.debug: var="matrix_postgres_synapse_vacuum_result"
@ -78,18 +78,21 @@
|
||||
# role (`matrix_postgres_connection_username`) and database (`matrix_postgres_db_name`) by itself on startup,
|
||||
# we need to remove these from the dump, or we'll get errors saying these already exist.
|
||||
- name: Perform Postgres database dump
|
||||
ansible.builtin.command: >-
|
||||
{{ matrix_host_command_docker }} run --rm --name matrix-postgres-dump
|
||||
--log-driver=none
|
||||
--user={{ matrix_user_uid }}:{{ matrix_user_gid }}
|
||||
--network={{ matrix_docker_network }}
|
||||
--env-file={{ matrix_postgres_base_path }}/env-postgres-psql
|
||||
--entrypoint=/bin/sh
|
||||
--mount type=bind,src={{ postgres_dump_dir }},dst=/out
|
||||
{{ matrix_postgres_detected_version_corresponding_docker_image }}
|
||||
-c "pg_dumpall -h matrix-postgres
|
||||
{{ '| gzip -c ' if postgres_dump_name.endswith('.gz') else '' }}
|
||||
> /out/{{ postgres_dump_name }}"
|
||||
ansible.builtin.command:
|
||||
cmd: >-
|
||||
{{ matrix_host_command_docker }} run --rm --name matrix-postgres-dump
|
||||
--log-driver=none
|
||||
--user={{ matrix_user_uid }}:{{ matrix_user_gid }}
|
||||
--network={{ matrix_docker_network }}
|
||||
--env-file={{ matrix_postgres_base_path }}/env-postgres-psql
|
||||
--entrypoint=/bin/sh
|
||||
--mount type=bind,src={{ postgres_dump_dir }},dst=/out
|
||||
{{ matrix_postgres_detected_version_corresponding_docker_image }}
|
||||
-c "pg_dumpall -h matrix-postgres
|
||||
{{ '| gzip -c ' if postgres_dump_name.endswith('.gz') else '' }}
|
||||
> /out/{{ postgres_dump_name }}"
|
||||
register: matrix_postgres_upgrade_postgres_dump_command_result
|
||||
changed_when: matrix_postgres_upgrade_postgres_dump_command_result.rc == 0
|
||||
|
||||
- name: Ensure matrix-postgres is stopped
|
||||
ansible.builtin.service:
|
||||
@ -97,7 +100,10 @@
|
||||
state: stopped
|
||||
|
||||
- name: Rename existing Postgres data directory
|
||||
ansible.builtin.command: "mv {{ matrix_postgres_data_path }} {{ postgres_auto_upgrade_backup_data_path }}"
|
||||
ansible.builtin.command:
|
||||
cmd: "mv {{ matrix_postgres_data_path }} {{ postgres_auto_upgrade_backup_data_path }}"
|
||||
register: matrix_postgres_upgrade_postgres_move_command_result
|
||||
changed_when: matrix_postgres_upgrade_postgres_move_command_result.rc == 0
|
||||
|
||||
- ansible.builtin.debug:
|
||||
msg: "NOTE: Your Postgres data directory has been moved from `{{ matrix_postgres_data_path }}` to `{{ postgres_auto_upgrade_backup_data_path }}`. In the event of failure, you can move it back and run the playbook with --tags=setup-postgres to restore operation."
|
||||
@ -155,7 +161,10 @@
|
||||
and restore the automatically-made backup (`mv {{ postgres_auto_upgrade_backup_data_path }} {{ matrix_postgres_data_path }}`).
|
||||
|
||||
- name: Perform Postgres database import
|
||||
ansible.builtin.command: "{{ matrix_postgres_import_command }}"
|
||||
ansible.builtin.command:
|
||||
cmd: "{{ matrix_postgres_import_command }}"
|
||||
register: matrix_postgres_upgrade_postgres_import_command_result
|
||||
changed_when: matrix_postgres_upgrade_postgres_import_command_result.rc == 0
|
||||
|
||||
- name: Delete Postgres database dump file
|
||||
ansible.builtin.file:
|
||||
|