Compare commits

...

3 Commits

32 changed files with 671 additions and 0 deletions

View File

@ -4,6 +4,11 @@
Roles for deploying matrix infrastructure using ansible.
## Roles
- [`cinny`](roles/cinny/README.md): [Cinny](https://cinny.in/) Web Client
- [`element`](roles/element/README.md): [Element](https://element.io/) Web Client
## License
[CNPLv7+](LICENSE.md): Cooperative Nonviolent Public License

6
playbooks/cinny.yml Normal file
View File

@ -0,0 +1,6 @@
---
# Entry-point playbook for the cinny role.
# Override the target host group with `-e cinny_hosts=...` and
# privilege escalation with `-e cinny_become=false`.
- name: Deploy and configure cinny
  hosts: "{{ cinny_hosts | default('cinny') }}"
  become: "{{ cinny_become | default(true) }}"
  roles:
    - role: finallycoffee.matrix.cinny

6
playbooks/element.yml Normal file
View File

@ -0,0 +1,6 @@
---
# Entry-point playbook for the element role.
# Override the target host group with `-e element_hosts=...` and
# privilege escalation with `-e element_become=false`.
- name: Deploy and configure element
  hosts: "{{ element_hosts | default('element') }}"
  become: "{{ element_become | default(true) }}"
  roles:
    - role: finallycoffee.matrix.element

29
roles/cinny/README.md Normal file
View File

@ -0,0 +1,29 @@
# `finallycoffee.matrix.cinny` ansible role
> [!WARNING]
> This role is a WIP and not yet usable
## Supported deployment methods
Set your `deployment_method` to:
- [`docker` (docs)](docs/docker.md) (current default)
- `podman`
- [`nginx` (docs)](docs/nginx.md)
- [`tarball` (docs)](docs/tarball.md)
Not yet implemented but planned:
- `apache2`
- `caddy`
## Configuration
All cinny `config.json` configuration keys are available as a snake-cased ansible variable:
- `cinny_config_homeserver_list`
- `cinny_config_allow_custom_homeservers`
- [...]
If you want to provide structured configuration directly, you can either provide additional configuration in `cinny_config` or overwrite all existing defaults by setting `cinny_config_complete`.
To ensure cinny is removed from the system, set `cinny_state` to `absent` (default is `present`).

View File

@ -0,0 +1,24 @@
---
# Final content of cinny's config.json.
# `combine` lets the *right-hand* dict win on conflicts, so the role
# defaults must be the base and the user-supplied `cinny_config` must be
# merged on top — otherwise user overrides would be silently discarded.
cinny_config_complete: >-
  {{ cinny_default_config | default({})
     | combine(cinny_config | default({})) }}
# Free-form user overrides for config.json (merged over the defaults).
cinny_config: {}
# Structured defaults assembled from the flattened `cinny_config_*` vars.
cinny_default_config:
  homeserverList: "{{ cinny_config_homeserver_list }}"
  allowCustomHomeservers: "{{ cinny_config_allow_custom_homeservers }}"
  featuredCommunities:
    openAsDefault: "{{ cinny_config_featured_communities_open_as_default }}"
    spaces: "{{ cinny_config_featured_communities_spaces }}"
    rooms: "{{ cinny_config_featured_communities_rooms }}"
    servers: "{{ cinny_config_featured_communities_servers }}"
  hashRouter:
    enabled: "{{ cinny_config_hash_router_enabled }}"
    basename: "{{ cinny_config_hash_router_basename }}"
cinny_config_homeserver_list: []
cinny_config_allow_custom_homeservers: true
cinny_config_featured_communities_open_as_default: false
cinny_config_featured_communities_spaces: []
cinny_config_featured_communities_rooms: []
cinny_config_featured_communities_servers: []
cinny_config_hash_router_enabled: false
cinny_config_hash_router_basename: "/"

View File

@ -0,0 +1,28 @@
---
# Fully-qualified container image reference, e.g. ghcr.io/cinnyapp/cinny:v4.2.1.
# The namespace segment is skipped when `cinny_container_image_namespace`
# is empty/false.
cinny_container_image: >-
  {{
    cinny_container_image_registry + '/'
    + ((cinny_container_image_namespace + '/')
       if cinny_container_image_namespace | default(false, true) else '')
    + cinny_container_image_name + ':'
    + (cinny_container_image_tag | default('v' + cinny_version, true))
  }}
cinny_container_image_registry: "ghcr.io"
cinny_container_image_namespace: "cinnyapp"
cinny_container_image_name: "cinny"
# Set to pin an explicit image tag; defaults to 'v' + cinny_version.
cinny_container_image_tag: ~
cinny_container_name: "cinny"
# docker -> 'unless-stopped', podman -> 'on-failure', anything else -> 'always'.
# The comparison must be parenthesised before piping into ternary(),
# otherwise the filter binds to the bare string; the original also had
# an unbalanced parenthesis which made the template fail to render.
cinny_container_restart_policy: >-
  {{ (cinny_deployment_method == 'docker')
     | ternary('unless-stopped',
               (cinny_deployment_method == 'podman')
               | ternary('on-failure', 'always')) }}
# Default volumes plus any user-supplied extra volumes.
cinny_container_full_volumes: >-
  {{ cinny_container_default_volumes
     + cinny_container_volumes | default([]) }}
cinny_container_default_volumes:
  - "{{ cinny_config_file }}:/usr/share/nginx/html/config.json:ro"

View File

@ -0,0 +1,18 @@
---
cinny_user: cinny
cinny_state: "present"
cinny_version: "4.2.1"
cinny_deployment_method: "docker"
cinny_base_path: "/opt/cinny"
cinny_source_path: "{{ cinny_base_path }}/src"
cinny_dist_path: "{{ cinny_source_path }}/dist"
cinny_config_path: "{{ cinny_base_path }}/config"
cinny_config_file: "{{ cinny_config_path }}/config.json"
# Ownership for files/directories created by the role. Falls back to the
# configured user name when the `cinny_user_info` register (or its
# uid/group) is not available. Note: ternary() evaluates both branches
# eagerly, so the original `cinny_user_info.uid` access raised an
# undefined-variable error whenever the register did not exist; a plain
# `default()` chain avoids that. The original `cinny_host_gid` was also
# missing its `>-` block-scalar indicator, making the value start with
# `{{` and fail YAML parsing as a flow mapping.
cinny_host_uid: "{{ cinny_user_info.uid | default(cinny_user) }}"
cinny_host_gid: "{{ cinny_user_info.group | default(cinny_user) }}"

View File

@ -0,0 +1,9 @@
---
cinny_nginx_listen_port: 8080
# User-facing knob for the vhost's server_name (documented in
# docs/nginx.md). The template reads `cinny_nginx_server_name`, which the
# original defaults never set — bridge the two so the option takes effect.
cinny_nginx_server: ~
cinny_nginx_server_name: "{{ cinny_nginx_server }}"
cinny_nginx_location: /
cinny_nginx_available_sites: "/etc/nginx/sites-available"
cinny_nginx_enabled_sites: "/etc/nginx/sites-enabled"
cinny_nginx_vhost_name: "cinny"
cinny_nginx_vhost_enable: true

View File

@ -0,0 +1,10 @@
---
# Base URL for release downloads; point at a mirror if needed.
cinny_tarball_server: "https://github.com"
cinny_tarball_url: >-
  {{ cinny_tarball_server }}/cinnyapp/cinny/releases/download/v{{ cinny_version }}/cinny-v{{ cinny_version }}.tar.gz
# Optional HTTP basic-auth credentials for the download URL.
cinny_tarball_url_username: ~
cinny_tarball_url_password: ~
# Where the downloaded archive is stored on the target host.
cinny_tarball_path: "/tmp/cinny-v{{ cinny_version }}.tar.gz"
# File used by deploy-tarball.yml to detect the currently deployed version.
cinny_running_version_file: "{{ cinny_source_path }}/cinny_version.txt"

View File

@ -0,0 +1,33 @@
# `cinny` deployment using `docker`
> [!NOTE]
> Needs the python library `docker` on the `ansible_host`.
## Configuration
The following options to the
[`docker_container` module](https://docs.ansible.com/ansible/latest/collections/community/docker/docker_container_module.html)
are available under the `cinny_container_` prefix:
- `env`
- `ports`
- `labels`
- `networks`
- `etc_hosts`
- `purge_networks`
The following variables are pre-populated by the role, so override them with care:
- `name`
- `image`
- `user`
- `volumes`
- `restart_policy`
## Pulling from a self-hosted container registry
Set `cinny_container_image_registry` to use a self-hosted docker registry / mirror / cache.
If you need to authenticate to your registry and are not yet logged in, set `cinny_container_image_registry_{username,password}` and the role will attempt to log in.
Set `cinny_container_image_registry_reauthorize` to `true` if you want to force a reauthorization at the registry.

11
roles/cinny/docs/nginx.md Normal file
View File

@ -0,0 +1,11 @@
# `cinny` deployment using `nginx` virtual host
The role will create a virtual host named after `cinny_nginx_vhost_name` (default: `cinny`) in `cinny_nginx_available_sites` (default: `/etc/nginx/sites-available`).
If you choose `cinny_nginx_vhost_enable` (default: `true`), it will also create a symlink from `cinny_nginx_enabled_sites` to its vhost.
> [!TIP]
> If you are deploying multiple cinny instances on a single host, customize `cinny_nginx_vhost_name` to contain your `cinny_nginx_server` in order to avoid filename collisions.
> [!IMPORTANT]
> If `cinny_nginx_vhost_enable` is `true`, the role will expect `nginx` to be in the `$PATH` (in order to test the configuration using `nginx -t`)

View File

@ -0,0 +1,13 @@
# `cinny` deployment from a tarball
The role supports just downloading and extracting a tarball, which can
then be served with a webserver of your own choice.
When `cinny_deployment_method` is set to `tarball`, the directory
`cinny_dist_path` (defaults to `/opt/cinny/src/dist`) needs to be
served at the desired URL.
Additionally, the following equivalent rules to
[the sample docker+nginx configuration](https://github.com/cinnyapp/cinny/blob/dev/docker-nginx.conf)
are needed for the webapp to work properly. For hosting in a path
like `/app`, set `cinny_config_hash_router_basename`.

View File

@ -0,0 +1,37 @@
---
# Base configuration: service user, directory layout and config.json.
# All resources honour `cinny_state` (present/absent).
- name: Ensure cinny user '{{ cinny_user }}' is {{ cinny_state }}
  ansible.builtin.user:
    name: "{{ cinny_user }}"
    system: "{{ cinny_user_system | default(true, true) }}"
    create_home: "{{ cinny_user_create_home | default(false, true) }}"
    state: "{{ cinny_state }}"
    groups: "{{ cinny_user_groups | default(omit) }}"
    append: "{{ cinny_user_groups_append | default(omit) }}"
  register: cinny_user_info
- name: Ensure host paths are {{ cinny_state }}
  ansible.builtin.file:
    name: "{{ path.name }}"
    state: "{{ (cinny_state == 'present') | ternary('directory', 'absent') }}"
    owner: "{{ path.owner | default(cinny_host_uid) }}"
    group: "{{ path.group | default(cinny_host_gid) }}"
    mode: "{{ path.mode | default('0750') }}"
  loop_control:
    loop_var: path
    label: "{{ path.name }}"
  loop:
    - name: "{{ cinny_base_path }}"
      mode: '0755'
    - name: "{{ cinny_config_path }}"
      mode: '0755'
    - name: "{{ cinny_source_path }}"
      mode: '0755'
- name: Ensure config file is {{ cinny_state }}
  ansible.builtin.copy:
    # Write the merged configuration (role defaults + user overrides).
    # The original wrote the raw `cinny_config`, dropping every default
    # documented in defaults/config.yml and the README.
    content: "{{ cinny_config_complete | to_nice_json }}"
    dest: "{{ cinny_config_file }}"
    owner: "{{ cinny_host_uid }}"
    group: "{{ cinny_host_gid }}"
    mode: "{{ cinny_config_file_mode | default('0664') }}"
  when: cinny_state == 'present'

View File

@ -0,0 +1,3 @@
---
# Placeholder: this deployment method is documented as planned but not
# implemented yet (see roles/cinny/README.md).
- name: Fail for not yet implemented deployment method
  ansible.builtin.fail:
    msg: "Not yet implemented"

View File

@ -0,0 +1,3 @@
---
# Placeholder: this deployment method is documented as planned but not
# implemented yet (see roles/cinny/README.md).
- name: Fail for not yet implemented deployment method
  ansible.builtin.fail:
    msg: "Not yet implemented"

View File

@ -0,0 +1,33 @@
---
# Deploy cinny as a docker container via community.docker.
- name: Ensure docker client is logged {{ (cinny_state == 'present') | ternary('in', 'out') }}
  community.docker.docker_login:
    registry_url: "{{ cinny_container_image_registry }}"
    username: "{{ cinny_container_image_registry_username }}"
    password: "{{ cinny_container_image_registry_password }}"
    reauthorize: "{{ cinny_container_image_registry_reauthorize | default(omit, true) }}"
    state: "{{ cinny_state }}"
  when:
    - cinny_container_image_registry_username | default(false, true)
    - cinny_container_image_registry_password | default(false, true)
- name: Ensure container image '{{ cinny_container_image }}' is {{ cinny_state }} locally
  community.docker.docker_image:
    name: "{{ cinny_container_image }}"
    state: "{{ cinny_state }}"
    # NOTE(review): `cinny_container_source` has no default in the files
    # shown here — confirm it is defined elsewhere (e.g. 'pull').
    source: "{{ cinny_container_source }}"
    # `force_source` is a boolean; re-pull only when an explicit tag
    # override is configured. The original passed the tag *string*,
    # which fails the module's boolean parameter validation.
    force_source: "{{ cinny_container_image_tag | default('', true) | length > 0 }}"
- name: Ensure container '{{ cinny_container_name }}' is {{ cinny_state }}
  community.docker.docker_container:
    name: "{{ cinny_container_name }}"
    image: "{{ cinny_container_image }}"
    state: "{{ (cinny_state == 'present') | ternary('started', 'absent') }}"
    env: "{{ cinny_container_env | default(omit) }}"
    user: "{{ cinny_container_user }}"
    ports: "{{ cinny_container_ports | default(omit) }}"
    labels: "{{ cinny_container_labels | default(omit) }}"
    volumes: "{{ cinny_container_full_volumes }}"
    networks: "{{ cinny_container_networks | default(omit) }}"
    etc_hosts: "{{ cinny_container_etc_hosts | default(omit) }}"
    restart_policy: "{{ cinny_container_restart_policy }}"
    # NOTE(review): `purge_networks` was removed in community.docker 4.x —
    # confirm the collection version this role targets.
    purge_networks: "{{ cinny_container_purge_networks | default(omit) }}"

View File

@ -0,0 +1,44 @@
---
# Deploy cinny as an nginx vhost serving the extracted tarball
# (deploy-tarball.yml runs first, see tasks/main.yml).
- name: Deploy nginx virtual host config file
  ansible.builtin.template:
    src: nginx.conf.j2
    dest: "{{ cinny_nginx_available_sites }}/{{ cinny_nginx_vhost_name }}"
    mode: "0640"
  when: cinny_state == 'present'
- name: Enable nginx virtual host
  ansible.builtin.file:
    path: "{{ cinny_nginx_enabled_sites }}/{{ cinny_nginx_vhost_name }}"
    src: "{{ cinny_nginx_available_sites }}/{{ cinny_nginx_vhost_name }}"
    state: "{{ (cinny_state == 'present') | ternary('link', 'absent') }}"
  when: cinny_nginx_vhost_enable
- name: Clean up nginx virtual host config file
  ansible.builtin.file:
    path: "{{ cinny_nginx_available_sites }}/{{ cinny_nginx_vhost_name }}"
    state: absent
  when: cinny_state == 'absent'
- name: Ensure nginx configuration is valid
  ansible.builtin.command:
    cmd: "nginx -t"
  # Read-only validation; without this the task reports 'changed' on
  # every run and breaks check-mode/idempotence reporting.
  changed_when: false
  when:
    - cinny_state == 'present'
    - cinny_nginx_vhost_enable
- name: Reload nginx using systemd
  ansible.builtin.systemd_service:
    name: "nginx.service"
    state: reloaded
  when:
    - cinny_state == 'present'
    - cinny_nginx_vhost_enable
    - ansible_facts['service_mgr'] == 'systemd'
- name: Inform user about required nginx reload
  ansible.builtin.debug:
    msg: "Restart nginx service (no systemd found)"
  when:
    - cinny_state == 'present'
    - cinny_nginx_vhost_enable
    - ansible_facts['service_mgr'] != 'systemd'

View File

@ -0,0 +1,22 @@
---
# Deploy cinny as a podman container via containers.podman.
- name: Ensure container image '{{ cinny_container_image }}' is {{ cinny_state }} locally
  containers.podman.podman_image:
    name: "{{ cinny_container_image }}"
    state: "{{ cinny_state }}"
    pull: "{{ cinny_container_source == 'pull' }}"
    # `force` is a boolean; re-pull only when an explicit tag override is
    # configured. The original passed the tag string, which fails the
    # module's boolean parameter validation.
    force: "{{ cinny_container_image_tag | default('', true) | length > 0 }}"
- name: Ensure container '{{ cinny_container_name }}' is {{ cinny_state }}
  containers.podman.podman_container:
    name: "{{ cinny_container_name }}"
    image: "{{ cinny_container_image }}"
    state: "{{ (cinny_state == 'present') | ternary('started', 'absent') }}"
    env: "{{ cinny_container_env | default(omit) }}"
    user: "{{ cinny_container_user }}"
    ports: "{{ cinny_container_ports | default(omit) }}"
    labels: "{{ cinny_container_labels | default(omit) }}"
    volumes: "{{ cinny_container_full_volumes }}"
    network: "{{ cinny_container_networks | default(omit) }}"
    hostname: "{{ cinny_container_hostname | default(omit) }}"
    etc_hosts: "{{ cinny_container_etc_hosts | default(omit) }}"
    restart_policy: "{{ cinny_container_restart_policy }}"

View File

@ -0,0 +1,46 @@
---
# Download and extract the cinny release tarball, but only when the
# requested version is newer than the one recorded on the host.
# NOTE(review): nothing in the shown tasks writes
# `cinny_running_version_file` after an update — confirm it is written
# elsewhere, otherwise every run is treated as an update.
- name: Check if running cinny version is saved on host
  ansible.builtin.stat:
    path: "{{ cinny_running_version_file }}"
  register: cinny_running_version_st
- name: Retrieve running cinny version
  ansible.builtin.slurp:
    path: "{{ cinny_running_version_file }}"
  register: cinny_running_version_info
  # stat results expose `exists`; the original `.exist` always raised.
  when: cinny_running_version_st.stat.exists
- name: Extract running cinny version
  ansible.builtin.set_fact:
    # `or` short-circuits, so `cinny_running_version` is only rendered
    # when the version file exists (slurp ran and has content).
    # The original expression was missing its closing `}}` and used
    # `version` as a filter, which modern ansible-core rejects.
    cinny_is_update: >-
      {{ not cinny_running_version_st.stat.exists
         or cinny_version is version(cinny_running_version, 'gt',
                                     version_type='semver') }}
  vars:
    # trim: the slurped file may carry a trailing newline, which would
    # break semver parsing.
    cinny_running_version: >-
      {{ cinny_running_version_info.content | default('') | b64decode | trim }}
- name: Download tarball from GitHub release page
  ansible.builtin.get_url:
    url: "{{ cinny_tarball_url }}"
    dest: "{{ cinny_tarball_path }}"
    url_username: "{{ cinny_tarball_url_username | default(omit, true) }}"
    url_password: "{{ cinny_tarball_url_password | default(omit, true) }}"
    mode: "0664"
  when: cinny_is_update
- name: Ensure old application files are gone
  ansible.builtin.file:
    path: "{{ cinny_dist_path }}"
    state: absent
  when: cinny_is_update
- name: Extract tarball to {{ cinny_source_path }}
  ansible.builtin.unarchive:
    src: "{{ cinny_tarball_path }}"
    dest: "{{ cinny_source_path }}"
    remote_src: true
    owner: "{{ cinny_host_uid }}"
    group: "{{ cinny_host_gid }}"
    mode: "u+rwX,g+rwX,o+rX"
  when: cinny_is_update

View File

@ -0,0 +1,23 @@
---
# Role entry point: validate inputs, apply base configuration, then
# dispatch to the deployment-method specific task file.
- name: Check if state is valid
  ansible.builtin.fail:
    msg: "Unknown state '{{ cinny_state }}'. Valid states are {{ cinny_states | join(', ') }}"
  when: cinny_state not in cinny_states
- name: Check if deployment method is supported
  ansible.builtin.fail:
    msg: "Deployment method '{{ cinny_deployment_method }}' is not supported! (supported are: {{ cinny_deployment_methods | join(', ') }})"
  when: cinny_deployment_method not in cinny_deployment_methods
- name: Include base configuration
  ansible.builtin.include_tasks:
    file: configure.yml
# Webserver-based methods (see cinny_needs_tarball in vars/main.yml)
# need the release tarball extracted before their own task file runs.
- name: Deploy tarball if required
  ansible.builtin.include_tasks:
    file: deploy-tarball.yml
  when: cinny_deployment_method in cinny_needs_tarball
- name: Deploy using {{ cinny_deployment_method }}
  ansible.builtin.include_tasks:
    file: "deploy-{{ cinny_deployment_method }}.yml"

View File

@ -0,0 +1,23 @@
{# nginx vhost serving the extracted cinny dist directory. #}
{# The rewrite rules mirror cinny's upstream docker-nginx.conf (see #}
{# docs/tarball.md): known static assets are served as-is, everything #}
{# else falls back to index.html for client-side routing. #}
server {
    listen {{ cinny_nginx_listen_port }};
    listen [::]:{{ cinny_nginx_listen_port }};
{%- if cinny_nginx_server_name | default(false, true) %}
    server_name {{ cinny_nginx_server_name }};
{%- endif %}
    location {{ cinny_nginx_location }} {
        root {{ cinny_dist_path }};
        rewrite ^/config.json$ /config.json break;
        rewrite ^/manifest.json$ /manifest.json break;
        rewrite ^.*/olm.wasm$ /olm.wasm break;
        rewrite ^/sw.js$ /sw.js break;
        rewrite ^/pdf.worker.min.js$ /pdf.worker.min.js break;
        rewrite ^/public/(.*)$ /public/$1 break;
        rewrite ^/assets/(.*)$ /assets/$1 break;
        rewrite ^(.+)$ /index.html break;
    }
}

17
roles/cinny/vars/main.yml Normal file
View File

@ -0,0 +1,17 @@
---
# Valid values for `cinny_state` (checked in tasks/main.yml).
cinny_states:
  - present
  - absent
# All deployment methods accepted by tasks/main.yml.
cinny_deployment_methods:
  - docker
  - podman
  - nginx
  - caddy
  - apache2
  - tarball
# Methods that require the release tarball to be downloaded and
# extracted before their own deploy task file runs.
cinny_needs_tarball:
  - nginx
  - caddy
  - apache2

24
roles/element/README.md Normal file
View File

@ -0,0 +1,24 @@
# `finallycoffee.matrix.element` ansible role
## Deployment method
Deploy the [element web-app](https://element.io/)
using the following supported methods by setting `element_deployment_method` to it:
- [`docker` (docs)](docs/docker.md) (default)
- `podman`
Planned deployment methods:
- `tarball`
- `nginx`
- `apache2`
## Configuration
Configure your element web-app instance by setting `element_config` directly
or use flattened config keys with the `element_config_` prefix.
For all available keys see
[the upstream configuration manual](https://github.com/element-hq/element-web/blob/develop/docs/config.md)
or [the role defaults in `defaults/config.yml`](defaults/config.yml).

View File

@ -0,0 +1,10 @@
---
# Final content of element's config.json.
# `combine` lets the *right-hand* dict win on conflicts, so the role
# defaults must be the base and the user-supplied `element_config` must
# be merged on top — otherwise user overrides would be silently discarded.
element_config_complete: >-
  {{ element_default_config | default({})
     | combine(element_config | default({})) }}
# Free-form user overrides for config.json (merged over the defaults).
element_config: {}
# Structured defaults assembled from the flattened `element_config_*` vars.
element_default_config:
  default_server_name: "{{ element_config_default_server_name }}"
  show_labs_settings: "{{ element_config_show_labs_settings }}"
element_config_default_server_name: "matrix.org"
element_config_show_labs_settings: false

View File

@ -0,0 +1,25 @@
---
# Fully-qualified container image reference,
# e.g. docker.io/vectorim/element-web:v1.11.77. The namespace segment is
# skipped when `element_container_image_namespace` is empty/false.
element_container_image: >-
  {{
    element_container_image_registry + '/'
    + ((element_container_image_namespace + '/')
       if element_container_image_namespace | default(false, true) else '')
    + element_container_image_name + ':'
    + (element_container_image_tag | default('v' + element_version, true))
  }}
element_container_image_registry: "docker.io"
element_container_image_namespace: "vectorim"
element_container_image_name: "element-web"
# Set to pin an explicit image tag; defaults to 'v' + element_version.
element_container_image_tag: ~
element_container_name: "element-web"
# docker -> 'unless-stopped', podman -> 'on-failure', anything else -> 'always'.
# The comparison must be parenthesised before piping into ternary(),
# otherwise the filter binds to the bare string; the original also had
# an unbalanced parenthesis which made the template fail to render.
element_container_restart_policy: >-
  {{ (element_deployment_method == 'docker')
     | ternary('unless-stopped',
               (element_deployment_method == 'podman')
               | ternary('on-failure', 'always')) }}
# Default volumes plus any user-supplied extra volumes.
element_container_full_volumes: >-
  {{ element_container_default_volumes
     + element_container_volumes | default([]) }}
element_container_default_volumes:
  - "{{ element_config_file }}:/app/config.json:ro"

View File

@ -0,0 +1,18 @@
---
element_user: element
element_state: "present"
element_version: "1.11.77"
element_deployment_method: "docker"
element_base_path: "/opt/element"
element_source_path: "{{ element_base_path }}/src"
element_dist_path: "{{ element_source_path }}/dist"
element_config_path: "{{ element_base_path }}/config"
element_config_file: "{{ element_config_path }}/config.json"
# Ownership for files/directories created by the role. Falls back to the
# configured user name when the `element_user_info` register (or its
# uid/group) is not available. Note: ternary() evaluates both branches
# eagerly, so the original `element_user_info.uid` access raised an
# undefined-variable error whenever the register did not exist; a plain
# `default()` chain avoids that. The original `element_host_gid` was
# also missing its `>-` block-scalar indicator, making the value start
# with `{{` and fail YAML parsing as a flow mapping.
element_host_uid: "{{ element_user_info.uid | default(element_user) }}"
element_host_gid: "{{ element_user_info.group | default(element_user) }}"

View File

@ -0,0 +1,33 @@
# `element` deployment using `docker`
> [!NOTE]
> Needs the python library `docker` on the `ansible_host`.
## Configuration
The following options to the
[`docker_container` module](https://docs.ansible.com/ansible/latest/collections/community/docker/docker_container_module.html)
are available under the `element_container_` prefix:
- `env`
- `ports`
- `labels`
- `networks`
- `etc_hosts`
- `purge_networks`
The following variables are pre-populated by the role, so override them with care:
- `name`
- `image`
- `user`
- `volumes`
- `restart_policy`
## Pulling from a self-hosted container registry
Set `element_container_image_registry` to use a self-hosted docker registry / mirror / cache.
If you need to authenticate to your registry and are not yet logged in, set `element_container_image_registry_{username,password}` and the role will attempt to log in.
Set `element_container_image_registry_reauthorize` to `true` if you want to force a reauthorization at the registry.

View File

@ -0,0 +1,35 @@
---
# Base configuration: service user, directory layout and config.json.
# All resources honour `element_state` (present/absent).
- name: Ensure element user '{{ element_user }}' is {{ element_state }}
  ansible.builtin.user:
    name: "{{ element_user }}"
    system: "{{ element_user_system | default(true, true) }}"
    create_home: "{{ element_user_create_home | default(false, true) }}"
    state: "{{ element_state }}"
  register: element_user_info
- name: Ensure host paths are {{ element_state }}
  ansible.builtin.file:
    name: "{{ path.name }}"
    state: "{{ (element_state == 'present') | ternary('directory', 'absent') }}"
    owner: "{{ path.owner | default(element_host_uid) }}"
    group: "{{ path.group | default(element_host_gid) }}"
    mode: "{{ path.mode | default('0750') }}"
  loop_control:
    loop_var: path
    label: "{{ path.name }}"
  loop:
    - name: "{{ element_base_path }}"
      mode: '0755'
    - name: "{{ element_config_path }}"
      mode: '0755'
    - name: "{{ element_source_path }}"
      mode: '0750'
- name: Ensure config file is {{ element_state }}
  ansible.builtin.copy:
    # Write the merged configuration (role defaults + user overrides).
    # The original wrote the raw `element_config`, dropping every default
    # documented in defaults/config.yml and the README.
    content: "{{ element_config_complete | to_nice_json }}"
    dest: "{{ element_config_file }}"
    owner: "{{ element_host_uid }}"
    group: "{{ element_host_gid }}"
    mode: "{{ element_config_file_mode | default('0664') }}"
  when: element_state == 'present'

View File

@ -0,0 +1,33 @@
---
# Deploy element-web as a docker container via community.docker.
- name: Ensure docker client is logged {{ (element_state == 'present') | ternary('in', 'out') }}
  community.docker.docker_login:
    registry_url: "{{ element_container_image_registry }}"
    username: "{{ element_container_image_registry_username }}"
    password: "{{ element_container_image_registry_password }}"
    reauthorize: "{{ element_container_image_registry_reauthorize | default(omit, true) }}"
    state: "{{ element_state }}"
  when:
    - element_container_image_registry_username | default(false, true)
    - element_container_image_registry_password | default(false, true)
- name: Ensure container image '{{ element_container_image }}' is {{ element_state }} locally
  community.docker.docker_image:
    name: "{{ element_container_image }}"
    state: "{{ element_state }}"
    # NOTE(review): `element_container_source` has no default in the
    # files shown here — confirm it is defined elsewhere (e.g. 'pull').
    source: "{{ element_container_source }}"
    # `force_source` is a boolean; re-pull only when an explicit tag
    # override is configured. The original passed the tag *string*,
    # which fails the module's boolean parameter validation.
    force_source: "{{ element_container_image_tag | default('', true) | length > 0 }}"
- name: Ensure container '{{ element_container_name }}' is {{ element_state }}
  community.docker.docker_container:
    name: "{{ element_container_name }}"
    image: "{{ element_container_image }}"
    state: "{{ (element_state == 'present') | ternary('started', 'absent') }}"
    env: "{{ element_container_env | default(omit) }}"
    user: "{{ element_container_user }}"
    ports: "{{ element_container_ports | default(omit) }}"
    labels: "{{ element_container_labels | default(omit) }}"
    volumes: "{{ element_container_full_volumes }}"
    networks: "{{ element_container_networks | default(omit) }}"
    etc_hosts: "{{ element_container_etc_hosts | default(omit) }}"
    restart_policy: "{{ element_container_restart_policy }}"
    # NOTE(review): `purge_networks` was removed in community.docker 4.x —
    # confirm the collection version this role targets.
    purge_networks: "{{ element_container_purge_networks | default(omit) }}"

View File

@ -0,0 +1,22 @@
---
# Deploy element-web as a podman container via containers.podman.
- name: Ensure container image '{{ element_container_image }}' is {{ element_state }} locally
  containers.podman.podman_image:
    name: "{{ element_container_image }}"
    state: "{{ element_state }}"
    pull: "{{ element_container_source == 'pull' }}"
    # `force` is a boolean; re-pull only when an explicit tag override is
    # configured. The original passed the tag string, which fails the
    # module's boolean parameter validation.
    force: "{{ element_container_image_tag | default('', true) | length > 0 }}"
- name: Ensure container '{{ element_container_name }}' is {{ element_state }}
  containers.podman.podman_container:
    name: "{{ element_container_name }}"
    image: "{{ element_container_image }}"
    state: "{{ (element_state == 'present') | ternary('started', 'absent') }}"
    env: "{{ element_container_env | default(omit) }}"
    user: "{{ element_container_user }}"
    ports: "{{ element_container_ports | default(omit) }}"
    labels: "{{ element_container_labels | default(omit) }}"
    volumes: "{{ element_container_full_volumes }}"
    network: "{{ element_container_networks | default(omit) }}"
    hostname: "{{ element_container_hostname | default(omit) }}"
    etc_hosts: "{{ element_container_etc_hosts | default(omit) }}"
    restart_policy: "{{ element_container_restart_policy }}"

View File

@ -0,0 +1,20 @@
---
# Role entry point: validate inputs, apply base configuration, then
# dispatch to the deployment-method specific task file.
- name: Check if state is valid
  ansible.builtin.fail:
    msg: "Unknown state '{{ element_state }}'. Valid states are {{ element_states | join(', ') }}"
  when: element_state not in element_states
- name: Check if deployment method is supported
  ansible.builtin.fail:
    msg: >-
      Deployment method '{{ element_deployment_method }}' is not supported!
      Supported are: {{ element_deployment_methods | join(', ') }}
  when: element_deployment_method not in element_deployment_methods
- name: Include base configuration
  ansible.builtin.include_tasks:
    file: configure.yml
- name: Deploy using {{ element_deployment_method }}
  ansible.builtin.include_tasks:
    file: "deploy-{{ element_deployment_method }}.yml"

View File

@ -0,0 +1,8 @@
---
# Valid values for `element_state` (checked in tasks/main.yml).
# The original named this `element_state:`, which both shadowed the
# scalar `element_state` default with a list and left `element_states`
# (referenced by tasks/main.yml) undefined.
element_states:
  - present
  - absent
# All deployment methods accepted by tasks/main.yml.
element_deployment_methods:
  - docker
  - podman