Merge remote-tracking branch 'origin/master' into synapse-workers
Sync with upstream
@@ -35,4 +35,49 @@ matrix_postgres_process_extra_arguments: []

# Takes an "<ip>:<port>" or "<port>" value (e.g. "127.0.0.1:5432"), or an empty string to not expose.
matrix_postgres_container_postgres_bind_port: ""

matrix_postgres_tool_synapse_janitor: "https://raw.githubusercontent.com/xwiki-labs/synapse_scripts/a9188ff175ae581610f92d58ea6eac9a114d854b/synapse_janitor.sql"

# A list of additional databases (and their credentials) to create.
#
# Example:
# matrix_postgres_additional_databases:
#   - name: matrix_appservice_discord
#     username: matrix_appservice_discord
#     password: some_password
#   - name: matrix_appservice_slack
#     username: matrix_appservice_slack
#     password: some_password
matrix_postgres_additional_databases: []

# A list of roles/users to avoid creating when importing (or upgrading) the database.
# If a dump file contains these roles and they've also been created beforehand (see `matrix_postgres_additional_databases`),
# importing would fail.
# We either need to not create them or to ignore the `CREATE ROLE` statements in the dump.
matrix_postgres_import_roles_to_ignore: [matrix_postgres_connection_username]

matrix_postgres_import_roles_ignore_regex: "^CREATE ROLE ({{ matrix_postgres_import_roles_to_ignore|join('|') }});"

# A list of databases to avoid creating when importing (or upgrading) the database.
# If a dump file contains these databases and they've also been created beforehand (see `matrix_postgres_additional_databases`),
# importing would fail.
# We either need to not create them or to ignore the `CREATE DATABASE` statements in the dump.
matrix_postgres_import_databases_to_ignore: [matrix_postgres_db_name]

matrix_postgres_import_databases_ignore_regex: "^CREATE DATABASE ({{ matrix_postgres_import_databases_to_ignore|join('|') }})\\s"

# The number of seconds to wait after starting `matrix-postgres.service`
# and before trying to run queries for creating additional databases/users against it.
#
# For most (subsequent) runs, Postgres would already be running, so no waiting happens at all.
matrix_postgres_additional_databases_postgres_start_wait_timeout_seconds: 15


matrix_postgres_pgloader_container_image_self_build: false
matrix_postgres_pgloader_container_image_self_build_repo: "https://github.com/illagrenan/pgloader-docker.git"
matrix_postgres_pgloader_container_image_self_build_repo_branch: "v{{ matrix_postgres_pgloader_docker_image_tag }}"
matrix_postgres_pgloader_container_image_self_build_src_path: "{{ matrix_postgres_base_path }}/pgloader-container-src"

# We use illagrenan/pgloader instead of the more official dimitri/pgloader image,
# because the official one only provides a `latest` tag.
matrix_postgres_pgloader_docker_image: "{{ matrix_postgres_pgloader_docker_image_name_prefix }}illagrenan/pgloader:{{ matrix_postgres_pgloader_docker_image_tag }}"
matrix_postgres_pgloader_docker_image_name_prefix: "{{ 'localhost/' if matrix_postgres_pgloader_container_image_self_build else 'docker.io/' }}"
matrix_postgres_pgloader_docker_image_tag: "3.6.2"
matrix_postgres_pgloader_docker_image_force_pull: "{{ matrix_postgres_pgloader_docker_image.endswith(':latest') }}"
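As an aside, these defaults are meant to be overridden from a host's own vars file. A minimal sketch of such an override, assuming purely hypothetical database names and password (it simply mirrors the commented example above):

# Hypothetical host-level override (e.g. in the host's vars.yml); names and password are placeholders.
matrix_postgres_additional_databases:
  - name: matrix_appservice_discord
    username: matrix_appservice_discord
    password: some_password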
roles/matrix-postgres/tasks/import_generic_sqlite_db.yml (new file)
@@ -0,0 +1,97 @@
---

# Pre-checks

- name: Fail if Postgres not enabled
  fail:
    msg: "Postgres via the matrix-postgres role is not enabled (`matrix_postgres_enabled`). Cannot import."
  when: "not matrix_postgres_enabled|bool"

- name: Fail if playbook called incorrectly
  fail:
    msg: "The `sqlite_database_path` variable needs to be provided to this playbook, via --extra-vars"
  when: "sqlite_database_path is not defined or sqlite_database_path.startswith('<')"

- name: Check if the provided SQLite database file exists
  stat:
    path: "{{ sqlite_database_path }}"
  register: sqlite_database_path_stat_result

- name: Fail if provided SQLite database file doesn't exist
  fail:
    msg: "File cannot be found on the server at {{ sqlite_database_path }}"
  when: "not sqlite_database_path_stat_result.stat.exists"

# We expect either `postgres_db_connection_string`, specifying a full Postgres database connection string,
# or `postgres_connection_string_variable_name`, specifying the name of a variable which contains a valid connection string.

- block:
    - name: Fail if postgres_connection_string_variable_name points to an undefined variable
      fail: msg="postgres_connection_string_variable_name is defined, but there is no variable with the name `{{ postgres_connection_string_variable_name }}`"
      when: "postgres_connection_string_variable_name not in vars"

    - name: Get Postgres connection string from variable
      set_fact:
        postgres_db_connection_string: "{{ lookup('vars', postgres_connection_string_variable_name) }}"
  when: 'postgres_connection_string_variable_name is defined'

- name: Fail if playbook called incorrectly
  fail:
    msg: >-
      Either a `postgres_db_connection_string` variable or a `postgres_connection_string_variable_name` needs to be provided to this playbook, via `--extra-vars`.
      Example: `--extra-vars="postgres_db_connection_string=postgresql://username:password@localhost:<port>/database_name"` or `--extra-vars="postgres_connection_string_variable_name=matrix_appservice_discord_database_connString"`
  when: "postgres_db_connection_string is not defined or not postgres_db_connection_string.startswith('postgresql://')"


# Defaults

- name: Set postgres_start_wait_time, if not provided
  set_fact:
    postgres_start_wait_time: 15
  when: "postgres_start_wait_time|default('') == ''"


# Actual import work

- name: Ensure matrix-postgres is started
  service:
    name: matrix-postgres
    state: started
    daemon_reload: yes
  register: matrix_postgres_service_start_result

- name: Wait a bit, so that Postgres can start
  wait_for:
    timeout: "{{ postgres_start_wait_time }}"
  delegate_to: 127.0.0.1
  become: false
  when: "matrix_postgres_service_start_result.changed|bool"

- name: Import SQLite database from {{ sqlite_database_path }} into Postgres
  command:
    cmd: >-
      {{ matrix_host_command_docker }} run
      --rm
      --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
      --cap-drop=ALL
      --network={{ matrix_docker_network }}
      --mount type=bind,src={{ sqlite_database_path }},dst=/in.db,ro
      --entrypoint=/bin/sh
      {{ matrix_postgres_pgloader_docker_image }}
      -c
      'pgloader /in.db {{ postgres_db_connection_string }}'

- name: Archive SQLite database ({{ sqlite_database_path }} -> {{ sqlite_database_path }}.backup)
  command:
    cmd: "mv {{ sqlite_database_path }} {{ sqlite_database_path }}.backup"

- name: Inject result
  set_fact:
    matrix_playbook_runtime_results: |
      {{
        matrix_playbook_runtime_results|default([])
        +
        [
          "NOTE: Your SQLite database file has been imported into Postgres. The original file has been moved from `{{ sqlite_database_path }}` to `{{ sqlite_database_path }}.backup`. When you've confirmed that the import went well and everything works, you should be able to safely delete this file."
        ]
      }}
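For illustration, the variables this task file expects could be collected in a small YAML file and supplied with `--extra-vars "@import-vars.yml"`. This is only a sketch: the database path is a placeholder, and the connection string variable name reuses the example from the failure message above.

# Hypothetical extra-vars for the generic SQLite import (the .db path is a placeholder):
sqlite_database_path: /matrix/appservice-discord/discord.db
postgres_connection_string_variable_name: matrix_appservice_discord_database_connString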
@@ -74,8 +74,8 @@
      {{ matrix_postgres_docker_image_latest }}
      -c "cat /{{ server_path_postgres_dump|basename }} |
      {{ 'gunzip |' if server_path_postgres_dump.endswith('.gz') else '' }}
      grep -vE '^CREATE ROLE {{ matrix_postgres_connection_username }}' |
      grep -vE '^CREATE DATABASE {{ matrix_postgres_db_name }}' |
      grep -vE '{{ matrix_postgres_import_roles_ignore_regex }}' |
      grep -vE '{{ matrix_postgres_import_databases_ignore_regex }}' |
      psql -v ON_ERROR_STOP=1 -h matrix-postgres"

# This is a hack.
@@ -19,21 +19,24 @@
  tags:
    - import-postgres

- import_tasks: "{{ role_path }}/tasks/import_sqlite_db.yml"
# The `run_postgres_import_sqlite_db` variable had better be renamed to be consistent,
# but that's a breaking change which may cause trouble for people.
- import_tasks: "{{ role_path }}/tasks/import_synapse_sqlite_db.yml"
  when: run_postgres_import_sqlite_db|bool
  tags:
    - import-sqlite-db
    - import-synapse-sqlite-db

# Perhaps we need a new variable here, instead of `run_postgres_import_sqlite_db`.
- import_tasks: "{{ role_path }}/tasks/import_generic_sqlite_db.yml"
  when: run_postgres_import_sqlite_db|bool
  tags:
    - import-generic-sqlite-db

- import_tasks: "{{ role_path }}/tasks/upgrade_postgres.yml"
  when: run_postgres_upgrade|bool
  tags:
    - upgrade-postgres

- import_tasks: "{{ role_path }}/tasks/run_synapse_janitor.yml"
  when: run_postgres_synapse_janitor|bool
  tags:
    - run-postgres-synapse-janitor

- import_tasks: "{{ role_path }}/tasks/run_vacuum.yml"
  when: run_postgres_vacuum|bool
  tags:
@@ -1,117 +0,0 @@
---

# Pre-checks

- name: Fail if Postgres not enabled
  fail:
    msg: "Postgres via the matrix-postgres role is not enabled (`matrix_postgres_enabled`). Cannot run synapse-janitor."
  when: "not matrix_postgres_enabled|bool"

- name: Fail if not aware of the risks
  fail:
    msg: >-
      Using Synapse Janitor is considered dangerous and may break your database.
      See https://github.com/spantaleev/matrix-docker-ansible-deploy/issues/465.
      If you'd like to run it anyway, add `--extra-vars='i_know_synapse_janitor_is_dangerous=1'` to your command.
  when: "i_know_synapse_janitor_is_dangerous|default('') == ''"

# Defaults

- name: Set postgres_start_wait_time, if not provided
  set_fact:
    postgres_start_wait_time: 15
  when: "postgres_start_wait_time|default('') == ''"

- name: Set postgres_synapse_janitor_wait_time, if not provided
  set_fact:
    postgres_synapse_janitor_wait_time: "{{ 7 * 86400 }}"
  when: "postgres_synapse_janitor_wait_time|default('') == ''"

- name: Set postgres_synapse_janitor_tool_path, if not provided
  set_fact:
    postgres_synapse_janitor_tool_path: "{{ matrix_postgres_base_path }}/synapse_janitor.sql"
  when: "postgres_synapse_janitor_tool_path|default('') == ''"


# Actual janitor work

- name: Download synapse-janitor tool
  get_url:
    url: "{{ matrix_postgres_tool_synapse_janitor }}"
    dest: "{{ postgres_synapse_janitor_tool_path }}"
    force: true
    mode: 0550
    owner: "{{ matrix_user_username }}"
    group: "{{ matrix_user_groupname }}"

- name: Ensure matrix-postgres is started
  service:
    name: matrix-postgres
    state: started
    daemon_reload: yes

- name: Wait a bit, so that Postgres can start
  wait_for:
    timeout: "{{ postgres_start_wait_time }}"
  delegate_to: 127.0.0.1
  become: false

- import_tasks: tasks/util/detect_existing_postgres_version.yml

- name: Abort, if no existing Postgres version detected
  fail:
    msg: "Could not find existing Postgres installation"
  when: "not matrix_postgres_detected_existing|bool"

- name: Generate Postgres database synapse-janitor command
  set_fact:
    matrix_postgres_synapse_janitor_command: >-
      {{ matrix_host_command_docker }} run --rm --name matrix-postgres-synapse-janitor
      --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
      --cap-drop=ALL
      --network={{ matrix_docker_network }}
      --env-file={{ matrix_postgres_base_path }}/env-postgres-psql
      --mount type=bind,src={{ postgres_synapse_janitor_tool_path }},dst=/synapse_janitor.sql,ro=true
      {{ matrix_postgres_docker_image_latest }}
      psql -v ON_ERROR_STOP=1 -h matrix-postgres {{ matrix_synapse_database_database }} -f /synapse_janitor.sql

- name: Note about Postgres purging alternative
  debug:
    msg: >-
      Running synapse-janitor with the following Postgres command: `{{ matrix_postgres_synapse_janitor_command }}`.
      If this crashes, you can stop all processes (`systemctl stop matrix-*`),
      start Postgres only (`systemctl start matrix-postgres`)
      and manually run the above command directly on the server.

- name: Populate service facts
  service_facts:

- set_fact:
    matrix_postgres_synapse_was_running: "{{ ansible_facts.services['matrix-synapse.service']|default(none) is not none and ansible_facts.services['matrix-synapse.service'].state == 'running' }}"

- name: Ensure matrix-synapse is stopped
  service:
    name: matrix-synapse
    state: stopped
    daemon_reload: yes

- name: Run synapse-janitor
  command: "{{ matrix_postgres_synapse_janitor_command }}"
  async: "{{ postgres_synapse_janitor_wait_time }}"
  poll: 10
  register: matrix_postgres_synapse_janitor_result

# Intentionally show the results
- debug: var="matrix_postgres_synapse_janitor_result"

- name: Ensure matrix-synapse is started, if it previously was
  service:
    name: matrix-synapse
    state: started
    daemon_reload: yes
  when: "matrix_postgres_synapse_was_running|bool"

- name: Delete synapse-janitor tool
  file:
    path: "{{ postgres_synapse_janitor_tool_path }}"
    state: absent
@@ -113,6 +113,13 @@
    daemon_reload: yes
  when: "matrix_postgres_enabled|bool and matrix_postgres_systemd_service_result.changed"

- include_tasks:
    file: "{{ role_path }}/tasks/util/create_additional_databases.yml"
    apply:
      tags:
        - always
  when: "matrix_postgres_enabled|bool and matrix_postgres_additional_databases|length > 0"

#
# Tasks related to getting rid of the internal postgres server (if it was previously enabled)
#
@@ -135,8 +135,8 @@
      {{ matrix_postgres_docker_image_latest }}
      -c "cat /in/{{ postgres_dump_name }} |
      {{ 'gunzip |' if postgres_dump_name.endswith('.gz') else '' }}
      grep -vE '^CREATE ROLE {{ matrix_postgres_connection_username }}' |
      grep -vE '^CREATE DATABASE {{ matrix_postgres_db_name }}' |
      grep -vE '{{ matrix_postgres_import_roles_ignore_regex }}' |
      grep -vE '{{ matrix_postgres_import_databases_ignore_regex }}' |
      psql -v ON_ERROR_STOP=1 -h matrix-postgres"

# This is a hack.
@@ -0,0 +1,40 @@
---

# It'd be better if this belonged to `validate_config.yml`, but it would have to be some loop-within-a-loop there,
# and that's ugly. We also don't expect this to catch errors often. It's more of a defensive last-minute check.
- name: Fail if additional database data appears invalid
  fail:
    msg: "Additional database definition ({{ additional_db }}) lacks a required key: {{ item }}"
  when: "item not in additional_db"
  with_items: "{{ ['name', 'username', 'password'] }}"

# The SQL statements that we'll run against Postgres are stored in a file that others can't read.
# This file will be mounted into the container and fed to Postgres.
# This way, we avoid passing sensitive data around in CLI commands that other users on the system can see.
- name: Create additional database initialization SQL file for {{ additional_db.name }}
  template:
    src: "{{ role_path }}/templates/sql/init-additional-db-user-and-role.sql.j2"
    dest: "/tmp/matrix-postgres-init-additional-db-user-and-role.sql"
    mode: 0600
    owner: "{{ matrix_user_uid }}"
    group: "{{ matrix_user_gid }}"

- name: Execute Postgres additional database initialization SQL file for {{ additional_db.name }}
  command:
    cmd: >-
      {{ matrix_host_command_docker }} run
      --rm
      --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
      --cap-drop=ALL
      --env-file={{ matrix_postgres_base_path }}/env-postgres-psql
      --network {{ matrix_docker_network }}
      --mount type=bind,src=/tmp/matrix-postgres-init-additional-db-user-and-role.sql,dst=/matrix-postgres-init-additional-db-user-and-role.sql,ro
      --entrypoint=/bin/sh
      {{ matrix_postgres_docker_image_to_use }}
      -c
      'psql -h {{ matrix_postgres_connection_hostname }} --file=/matrix-postgres-init-additional-db-user-and-role.sql'

- name: Delete additional database initialization SQL file for {{ additional_db.name }}
  file:
    path: /tmp/matrix-postgres-init-additional-db-user-and-role.sql
    state: absent
@@ -0,0 +1,23 @@
---

- name: Ensure matrix-postgres is started
  service:
    name: matrix-postgres
    state: started
    daemon_reload: yes
  register: matrix_postgres_service_start_result

- name: Wait a bit, so that Postgres can start
  wait_for:
    timeout: "{{ matrix_postgres_additional_databases_postgres_start_wait_timeout_seconds }}"
  delegate_to: 127.0.0.1
  become: false
  when: "matrix_postgres_service_start_result.changed|bool"

- name: Create additional Postgres user and database
  include_tasks: "{{ role_path }}/tasks/util/create_additional_database.yml"
  with_items: "{{ matrix_postgres_additional_databases }}"
  loop_control:
    loop_var: additional_db
  # Suppress logging to avoid dumping the credentials to the shell
  no_log: true
roles/matrix-postgres/tasks/util/migrate_db_to_postgres.yml (new file)
@@ -0,0 +1,168 @@
---

- name: Fail if Postgres not enabled
  fail:
    msg: "Postgres via the matrix-postgres role is not enabled (`matrix_postgres_enabled`). Cannot migrate."
  when: "not matrix_postgres_enabled|bool"

- name: Fail if util called incorrectly (missing matrix_postgres_db_migration_request)
  fail:
    msg: "The `matrix_postgres_db_migration_request` variable needs to be provided to this util."
  when: "matrix_postgres_db_migration_request is not defined"

- name: Fail if util called incorrectly (invalid matrix_postgres_db_migration_request)
  fail:
    msg: "The `matrix_postgres_db_migration_request` variable needs to contain `{{ item }}`."
  with_items:
    - src
    - dst
    - caller
    - engine_variable_name
    - systemd_services_to_stop
  when: "item not in matrix_postgres_db_migration_request"

- name: Check if the provided source database file exists
  stat:
    path: "{{ matrix_postgres_db_migration_request.src }}"
  register: matrix_postgres_db_migration_request_src_stat_result

- name: Fail if provided source database file doesn't exist
  fail:
    msg: "File cannot be found on the server at {{ matrix_postgres_db_migration_request.src }}"
  when: "not matrix_postgres_db_migration_request_src_stat_result.stat.exists"

- block:
    - name: Ensure pgloader repository is present on self-build
      git:
        repo: "{{ matrix_postgres_pgloader_container_image_self_build_repo }}"
        dest: "{{ matrix_postgres_pgloader_container_image_self_build_src_path }}"
        version: "{{ matrix_postgres_pgloader_container_image_self_build_repo_branch }}"
        force: "yes"
      register: matrix_postgres_pgloader_git_pull_results

    # If `stable` is used, we hit an error when processing /opt/src/pgloader/build/quicklisp/dists/quicklisp/software/uax-15-20201220-git/data/CompositionExclusions.txt:
    # > the octet sequence #(194) cannot be decoded
    #
    # The issue is described here and has not been fixed for months: https://github.com/dimitri/pgloader/pull/1179
    #
    # Although we're not using the dimitri/pgloader image, the one we're using suffers from the same problem.
    - name: Switch pgloader base image from Debian stable (likely 10.x/Buster) to Bullseye
      lineinfile:
        path: "{{ matrix_postgres_pgloader_container_image_self_build_src_path }}/Dockerfile"
        regexp: "{{ item.match }}"
        line: "{{ item.replace }}"
      with_items:
        - match: '^FROM debian:stable-slim as builder$'
          replace: 'FROM debian:bullseye-slim as builder'
        - match: '^FROM debian:stable-slim$'
          replace: 'FROM debian:bullseye-slim'

    - name: Ensure pgloader Docker image is built
      docker_image:
        name: "{{ matrix_postgres_pgloader_docker_image }}"
        source: build
        force_source: "{{ matrix_postgres_pgloader_git_pull_results.changed }}"
        build:
          dockerfile: Dockerfile
          path: "{{ matrix_postgres_pgloader_container_image_self_build_src_path }}"
          pull: yes
  when: "matrix_postgres_pgloader_container_image_self_build|bool"

- name: Ensure pgloader Docker image is pulled
  docker_image:
    name: "{{ matrix_postgres_pgloader_docker_image }}"
    source: "{{ 'pull' if ansible_version.major > 2 or ansible_version.minor > 7 else omit }}"
    force_source: "{{ matrix_postgres_pgloader_docker_image_force_pull if ansible_version.major > 2 or ansible_version.minor >= 8 else omit }}"
    force: "{{ omit if ansible_version.major > 2 or ansible_version.minor >= 8 else matrix_postgres_pgloader_docker_image_force_pull }}"
  when: "not matrix_postgres_pgloader_container_image_self_build"

# Defaults

- name: Set postgres_start_wait_time, if not provided
  set_fact:
    postgres_start_wait_time: 15
  when: "postgres_start_wait_time|default('') == ''"

# Actual import work

# matrix-postgres is most likely started already
- name: Ensure matrix-postgres is started
  service:
    name: matrix-postgres
    state: started
    daemon_reload: yes
  register: matrix_postgres_service_start_result

- name: Wait a bit, so that Postgres can start
  wait_for:
    timeout: "{{ postgres_start_wait_time }}"
  delegate_to: 127.0.0.1
  become: false
  when: "matrix_postgres_service_start_result.changed|bool"

# We only stop services here, leaving it to the caller to start them later.
#
# We can't start them, because they probably need to be reconfigured too (changing the configuration from using SQLite to Postgres, etc.)
# before starting.
#
# Since the caller will be starting them, it might make sense to leave stopping to it as well.
# However, we don't do that, because it's simpler having it here, and it also only happens if we'll actually be doing an import.
# If we bailed out (somewhere above), nothing would have gotten stopped, and it's nice to leave things running in such cases.
- name: Ensure systemd services blocking the database import are stopped
  service:
    name: "{{ item }}"
    state: stopped
  failed_when: false
  with_items: "{{ matrix_postgres_db_migration_request.systemd_services_to_stop }}"

- name: Import {{ matrix_postgres_db_migration_request.engine_old }} database from {{ matrix_postgres_db_migration_request.src }} into Postgres
  command:
    cmd: >-
      {{ matrix_host_command_docker }} run
      --rm
      --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
      --cap-drop=ALL
      --network={{ matrix_docker_network }}
      --mount type=bind,src={{ matrix_postgres_db_migration_request.src }},dst=/in.db,ro
      --entrypoint=/bin/sh
      {{ matrix_postgres_pgloader_docker_image }}
      -c
      'pgloader {{ matrix_postgres_db_migration_request.pgloader_options|default([])|join(' ') }} /in.db {{ matrix_postgres_db_migration_request.dst }}'

- block:
    # We can't use `{{ role_path }}` here, neither with `import_tasks` nor with `include_tasks`,
    # because it refers to the role that included this util, and not to the role this file belongs to.
    - import_tasks: "{{ role_path }}/../matrix-postgres/tasks/util/detect_existing_postgres_version.yml"

    - set_fact:
        matrix_postgres_docker_image_to_use: "{{ matrix_postgres_docker_image_latest if matrix_postgres_detected_version_corresponding_docker_image == '' else matrix_postgres_detected_version_corresponding_docker_image }}"

    - name: Execute additional Postgres SQL migration statements
      command:
        cmd: >-
          {{ matrix_host_command_docker }} run
          --rm
          --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
          --cap-drop=ALL
          --env-file={{ matrix_postgres_base_path }}/env-postgres-psql
          --network={{ matrix_docker_network }}
          {{ matrix_postgres_docker_image_to_use }}
          psql --host=matrix-postgres --dbname={{ matrix_postgres_db_migration_request.additional_psql_statements_db_name }} --command='{{ item }}'
      with_items: "{{ matrix_postgres_db_migration_request.additional_psql_statements_list }}"
  when: "matrix_postgres_db_migration_request.additional_psql_statements_list|default([])|length > 0"

- name: Archive {{ matrix_postgres_db_migration_request.engine_old }} database ({{ matrix_postgres_db_migration_request.src }} -> {{ matrix_postgres_db_migration_request.src }}.backup)
  command:
    cmd: "mv {{ matrix_postgres_db_migration_request.src }} {{ matrix_postgres_db_migration_request.src }}.backup"

- name: Inject result
  set_fact:
    matrix_playbook_runtime_results: |
      {{
        matrix_playbook_runtime_results|default([])
        +
        [
          "NOTE: Your {{ matrix_postgres_db_migration_request.engine_old }} database file has been imported into Postgres. The original database file has been moved from `{{ matrix_postgres_db_migration_request.src }}` to `{{ matrix_postgres_db_migration_request.src }}.backup`. When you've confirmed that the import went well and everything works, you should be able to safely delete this file."
        ]
      }}
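To illustrate what a caller of this util is expected to provide, here is a hedged sketch of a `matrix_postgres_db_migration_request` definition. It covers only the required keys validated above plus the optional ones referenced in the tasks; every concrete value (paths, service names, variable names) is a placeholder, not something prescribed by this playbook.

# Hypothetical migration request set by a calling role (all concrete values are placeholders):
matrix_postgres_db_migration_request:
  src: /matrix/appservice-discord/discord.db                  # source database file to import
  dst: "{{ matrix_appservice_discord_database_connString }}"  # postgresql:// connection string for the target
  caller: setup.yml                                           # required by the validation above; not otherwise used in this file
  engine_old: sqlite                                          # only used in task names and the result message
  engine_variable_name: matrix_appservice_discord_database_engine
  systemd_services_to_stop: ['matrix-appservice-discord.service']
  pgloader_options: []                                        # optional, passed through to pgloader
  additional_psql_statements_list: []                         # optional post-import SQL statements
  additional_psql_statements_db_name: matrix_appservice_discord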
@@ -0,0 +1,19 @@
-- `CREATE USER` does not support `IF NOT EXISTS`, so we use this workaround to prevent an error and raise a notice instead.
-- Seen here: https://stackoverflow.com/a/49858797
DO $$
BEGIN
  CREATE USER {{ additional_db.username }};
  EXCEPTION WHEN DUPLICATE_OBJECT THEN
  RAISE NOTICE 'not creating user {{ additional_db.username }}, since it already exists';
END
$$;

-- This is useful for initial user creation (since we don't assign a password above) and for handling subsequent password changes
-- TODO - we should escape quotes in the password.
ALTER ROLE {{ additional_db.username }} PASSWORD '{{ additional_db.password }}';

-- This will generate an error on subsequent execution
CREATE DATABASE {{ additional_db.name }} WITH LC_CTYPE 'C' LC_COLLATE 'C' OWNER {{ additional_db.username }};

-- This is useful for changing the database owner subsequently
ALTER DATABASE {{ additional_db.name }} OWNER TO {{ additional_db.username }};
@@ -3,6 +3,7 @@
Description=Matrix Postgres server
After=docker.service
Requires=docker.service
DefaultDependencies=no

[Service]
Type=simple