Compare commits


22 Commits

b14f36c7e8  meta: update collection version to 0.3.0  (2025-04-27 18:44:40 +02:00)
762e2ffc27  feat(mosh): add ansible role  (2025-04-27 17:36:49 +02:00)
115cfa8236  feat(openssh): add ansible role  (2025-04-27 13:59:41 +02:00)
e27eb145f1  update(minio): bump container tag to RELEASE.2025-04-22T22-12-26Z  (2025-04-25 19:36:15 +02:00)
c286e1a6b4  feat(docker_registry): add playbook to manage registry credentials  (2025-04-24 16:55:42 +02:00)
517a2fe96c  update(nginx): bump version to 1.28.0  (2025-04-24 15:10:24 +02:00)
9d4baad491  fix(lego): only start systemd service if certificates are not present or changes occurred  (2025-04-23 15:36:18 +02:00)
0090baee97  feat(docker): support fedora targets  (2025-04-21 18:17:36 +02:00)
0111f74f0f  feat(packages): add dnf(5) support for installing system packages  (2025-04-21 15:41:41 +02:00)
83d223defa  feat(bootstrap): add support for dnf/fedora  (2025-04-21 15:38:37 +02:00)
c9b2f61787  feat: add playbooks for bootstrapping (installing python) and installing system packages (debian)  (2025-04-21 13:32:13 +02:00)
0771787c98  feat(docker): add ansible role  (2025-04-20 21:19:31 +02:00)
1024921a74  feat: add user role  (2025-04-20 15:36:43 +02:00)
62263726fa  update(nginx): bump version to 1.27.5  (2025-04-16 16:45:40 +02:00)
ae887a1936  update(lego): bump version to 4.23.0  (2025-04-16 16:45:09 +02:00)
1e57ae1ec0  update(minio): bump container tag to RELEASE.2025-04-08T15-41-24Z  (2025-04-14 20:12:27 +02:00)
90dae1311f  meta: bump galaxy version to 0.2.1, require community.docker@4.2.0  (2025-04-07 17:23:30 +02:00)
3733c0b7ab  feat(lego): add auto-update mechanism  (2025-04-07 17:23:26 +02:00)
877c5a137a  fix(lego): quoting issues leading to wrongful error  (2025-04-07 16:29:06 +02:00)
11d4b397ef  fix(lego): ensure variables are either defined or have null-check handling  (2025-04-06 09:11:28 +02:00)
76e89db5c6  refactor(minio): add state and deployment_method parametrization, bump version to RELEASE.2025-04-03T14-56-28Z  (2025-04-05 17:34:33 +02:00)
4bddc95161  fix(restic): if repository exists but is locked, ensure it is unlocked  (2025-04-05 15:39:17 +02:00)
61 changed files with 888 additions and 302 deletions

@ -1,20 +1,27 @@
namespace: finallycoffee
name: base
version: 0.2.0
version: 0.3.0
readme: README.md
authors:
- transcaffeine <transcaffeine@finally.coffee>
description: Roles for base services which are common dependencies other services like databases
description: >-2
Roles for base services which provide core functionality like managing packages
and ssh, or which are common dependencies of other services like databases
dependencies:
"community.docker": "^3.0.0"
"community.docker": "^4.2.0"
"community.general": "^10.0.0"
license_file: LICENSE.md
build_ignore:
- '*.tar.gz'
repository: https://git.finally.coffee/finallycoffee/base
issues: https://codeberg.org/finallycoffee/ansible-collection-base/issues
tags:
- bootstrap
- ssh
- mosh
- docker
- lego
- minio
- nginx
- restic
- user_management

playbooks/bootstrap.yml (new file, 78 lines)

@ -0,0 +1,78 @@
---
- name: Bootstrap everything needed for an ansible connection
hosts: "{{ target_hosts | default('all', true) }}"
become: "{{ target_host_become | default(true, false) }}"
gather_facts: false
pre_tasks:
- name: Gather information about the target system id
ansible.builtin.raw: "cat /etc/os-release | grep '^ID=' | cut -d '=' -f2"
register: target_host_os_info
check_mode: false
changed_when: false
- name: Set /etc/os-release system id
ansible.builtin.set_fact:
target_host_system_id: "{{ target_host_os_info.stdout_lines | first | trim }}"
delegate_to: localhost
- name: Gather information about the target system version
ansible.builtin.raw: "cat /etc/os-release | grep '^VERSION_ID=' | cut -d '=' -f2"
register: target_host_os_info_version
check_mode: false
changed_when: false
- name: Set /etc/os-release system version id
ansible.builtin.set_fact:
target_host_system_version_id: "{{ target_host_os_info_version.stdout_lines | first | trim }}"
delegate_to: localhost
tasks:
- name: Ensure apt bootstrap packages are installed
ansible.builtin.raw: "apt install {{ apt_bootstrap_packages | join(' ') }}"
register: target_host_apt_info
when: target_host_system_id in targets_using_apt
changed_when:
- "'0 upgraded' not in target_host_apt_info.stdout_lines | last"
- "'0 newly installed' not in target_host_apt_info.stdout_lines | last"
- name: Ensure dnf < 4 bootstrap packages are installed
ansible.builtin.raw: "dnf install --assumeyes {{ dnf4_bootstrap_packages | join(' ') }}"
register: target_host_dnf_info
when:
- target_host_system_id in targets_using_dnf4.keys()
- target_host_system_version_id | int < targets_using_dnf4[target_host_system_id]
changed_when:
- "(target_host_dnf_info.stdout_lines | last) != 'Nothing to do.'"
- name: Ensure dnf5 bootstrap packages are installed
ansible.builtin.raw: "dnf install --assumeyes {{ dnf5_bootstrap_packages | join(' ') }}"
register: target_host_dnf_info
when:
- target_host_system_id in targets_using_dnf5.keys()
- target_host_system_version_id | int >= targets_using_dnf5[target_host_system_id]
changed_when:
- "(target_host_dnf_info.stdout_lines | last) != 'Nothing to do.'"
- name: Sort hosts into os-specific groups
ansible.builtin.group_by:
key: >-2
{{ (os_group_prefix
| default(false, true)
| ternary(os_group_prefix | default('') + (os_group_seperator | default('_')), ''))
+ target_host_system_id }}
when: target_hosts_sort_into_system_ids | default(false, true)
changed_when: false
delegate_to: localhost
vars:
targets_using_apt:
- debian
- ubuntu
apt_bootstrap_packages:
- python3
- python3-apt
# default package manager is dnf5 since fedora 41
# https://fedoraproject.org/wiki/Changes/SwitchToDnf5#Current_status
targets_using_dnf4:
fedora: 41
targets_using_dnf5:
fedora: 41
dnf4_bootstrap_packages:
- python3
- python3-dnf
- python3-libdnf
dnf5_bootstrap_packages:
- python3-libdnf5
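
A usage sketch (not part of the diff): the play above is driven entirely by variables, so it can be targeted via extra-vars or group_vars. The variable names below are taken from the play; the values are illustrative only.

```yaml
# Hypothetical group_vars entry for bootstrapping a subset of hosts and
# sorting them into per-OS groups afterwards (e.g. 'os_debian', 'os_fedora').
target_hosts: "webservers"
target_host_become: true
target_hosts_sort_into_system_ids: true
os_group_prefix: "os"
```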

playbooks/docker.yml (new file, 6 lines)

@ -0,0 +1,6 @@
---
- name: Install and configure docker daemon
hosts: "{{ docker_hosts | default('docker', true) }}"
become: "{{ docker_become | default(false, true) }}"
roles:
- role: finallycoffee.base.docker

@ -0,0 +1,16 @@
---
- name: Manage docker registry credentials
hosts: "{{ docker_hosts | default('docker', true) }}"
become: "{{ docker_become | default(false) }}"
gather_facts: "{{ docker_registry_gather_facts | default(true) }}"
tasks:
- name: Manage docker registry credentials
community.docker.docker_login:
registry_url: "{{ docker_registry.registry }}"
username: "{{ docker_registry.username | default(omit) }}"
password: "{{ docker_registry.password | default(omit) }}"
state: "{{ docker_registry.state | default('present') }}"
loop: "{{ docker_registries | default([], true) }}"
loop_control:
loop_var: "docker_registry"
label: "{{ docker_registry.username}}@{{ docker_registry.registry }}"

playbooks/mosh.yml (new file, 6 lines)

@ -0,0 +1,6 @@
---
- name: Manage and configure mosh
hosts: "{{ mosh_hosts | default('mosh', true) }}"
become: "{{ mosh_become | default(true) }}"
roles:
- role: finallycoffee.base.mosh

playbooks/openssh.yml (new file, 7 lines)

@ -0,0 +1,7 @@
---
- name: Ensure openssh is installed and configured
hosts: "{{ openssh_target | default('openssh') }}"
become: "{{ openssh_become | default(true) }}"
gather_facts: "{{ openssh_gather_facts | default(true) }}"
roles:
- role: finallycoffee.base.openssh

playbooks/packages.yml (new file, 24 lines)

@ -0,0 +1,24 @@
---
- name: Install system packages on the remote
hosts: "{{ target_hosts | default('all', true) }}"
become: "{{ target_host_become | default(true, true) }}"
gather_facts: "{{ target_host_gather_facts | default(true, true) }}"
tasks:
- name: Install packages (apt)
ansible.builtin.apt:
package: "{{ package.name }}"
state: "{{ package.state | default('present') }}"
loop: "{{ system_packages | default([], true) }}"
loop_control:
loop_var: package
label: "{{ package.name }}"
when: ansible_facts['pkg_mgr'] == 'apt'
- name: Install packages (dnf)
ansible.builtin.dnf:
name: "{{ package.name }}"
state: "{{ package.state | default('present') }}"
loop: "{{ system_packages | default([], true) }}"
loop_control:
loop_var: package
label: "{{ package.name }}"
when: ansible_facts['pkg_mgr'] in ['dnf', 'dnf5', 'yum']
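
A rough sketch of the `system_packages` structure both tasks iterate over, inferred from the loop above; the package names are examples only.

```yaml
system_packages:
  - name: htop
  - name: vim
    state: present
  - name: nano
    state: absent
```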

playbooks/user.yml (new file, 7 lines)

@ -0,0 +1,7 @@
---
- name: Configure user accounts
hosts: "{{ user_hosts | default('all', true) }}"
become: "{{ user_role_become | default(false, true) }}"
gather_facts: "{{ user_role_gather_facts | default(false, true) }}"
roles:
- role: finallycoffee.base.user

roles/docker/README.md (new file, 13 lines)

@ -0,0 +1,13 @@
# `finallycoffee.base.docker` ansible role
Install and configure the docker daemon.
## Configuration
- `docker_daemon_config` - configuration for the docker daemon
- `docker_remove_legacy_packages` - clean up old versions of docker (see https://docs.docker.com/engine/install/debian/#uninstall-old-versions)
## Plugins
- `docker_plugin_buildx_enable` - enable the buildx plugin
- `docker_plugin_compose_enable` - enable docker compose
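
An illustrative host_vars snippet wiring these toggles together; the `docker_daemon_config` keys are ordinary `daemon.json` options and are examples, not role defaults.

```yaml
docker_plugin_buildx_enable: true
docker_plugin_compose_enable: true
docker_remove_legacy_packages: true
docker_daemon_config:
  log-driver: "json-file"
  log-opts:
    max-size: "10m"
```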

@ -0,0 +1,31 @@
---
docker_apt_key_url: "https://download.docker.com/linux/debian/gpg"
docker_apt_key_id: "9DC858229FC7DD38854AE2D88D81803C0EBFCD88"
docker_apt_arch: amd64
docker_apt_release_channel: stable
docker_apt_repository_url: "https://download.docker.com/linux/debian"
docker_apt_repository: >-2
deb [arch={{ docker_apt_arch }}] {{ docker_apt_repository_url }} {{ ansible_distribution_release }} {{ docker_apt_release_channel }}
docker_apt_cli_package: "docker-ce-cli"
docker_apt_plugin_buildx_package: "docker-buildx-plugin"
docker_apt_plugin_compose_package: "docker-compose-plugin"
docker_apt_base_packages:
- "docker-ce"
- "docker-ce-cli"
- "containerd.io"
docker_apt_packages: >-2
{{
docker_apt_base_packages
+ (docker_plugin_buildx_enable | default(false)
| ternary([ docker_apt_plugin_buildx_package ], []))
+ (docker_plugin_compose_enable | default(false)
| ternary([ docker_apt_plugin_compose_package ], []))
}}
docker_apt_legacy_packages:
- "docker.io"
- "docker-compose"
- "docker-doc"
- "podman-docker"
- "containerd"
- "runc"

@ -0,0 +1,34 @@
---
docker_fedora_repo_name: "docker-ce-stable"
docker_fedora_repo_description: "Docker CE Stable - $basearch"
docker_fedora_repo_url: "https://download.docker.com/linux/fedora/$releasever/$basearch/stable"
docker_fedora_repo_validate_certs: true
docker_fedora_repo_gpg_check: true
docker_fedora_repo_gpg_key: "https://download.docker.com/linux/fedora/gpg"
docker_fedora_cli_package: "docker-ce-cli"
docker_fedora_plugin_buildx_package: "docker-buildx-plugin"
docker_fedora_plugin_compose_package: "docker-compose-plugin"
docker_fedora_base_packages:
- "docker-ce"
- "docker-ce-cli"
- "containerd.io"
docker_fedora_packages: >-2
{{
docker_fedora_base_packages
+ (docker_plugin_buildx_enable | default(false)
| ternary([ docker_fedora_plugin_buildx_package ], []))
+ (docker_plugin_compose_enable | default(false)
| ternary([ docker_fedora_plugin_compose_package ], []))
}}
docker_fedora_legacy_packages:
- "docker"
- "docker-client"
- "docker-client-latest"
- "docker-common"
- "docker-latest"
- "docker-latest-logrotate"
- "docker-logrotate"
- "docker-selinux"
- "docker-engine-selinux"
- "docker-engine"

@ -0,0 +1,13 @@
---
docker_state: "present"
docker_daemon_config: {}
docker_daemon_config_file: "/etc/docker/daemon.json"
docker_daemon_config_file_mode: "0644"
docker_daemon_config_owner: root
docker_daemon_config_group: "{{ docker_daemon_config_owner }}"
docker_plugin_buildx_enable: false
docker_plugin_compose_enable: false
docker_remove_legacy_packages: true

@ -0,0 +1,5 @@
---
docker_systemd_service_name: "docker.service"
docker_systemd_service_state: >-2
{{ (docker_state == 'present') | ternary('started', 'stopped') }}
docker_systemd_service_enabled: "{{ (docker_state == 'present') }}"

@ -0,0 +1,6 @@
---
- name: Restart docker daemon
ansible.builtin.systemd_service:
name: "{{ docker_systemd_service_name }}"
state: "restarted"
listen: "docker-restart"

@ -0,0 +1,18 @@
---
- name: Ensure config directory '{{ docker_daemon_config_file | dirname }}' is present
ansible.builtin.file:
path: "{{ docker_daemon_config_file | dirname }}"
state: "directory"
mode: "0755"
owner: "{{ docker_daemon_config_owner }}"
group: "{{ docker_daemon_config_group }}"
- name: Configure docker daemon using '{{ docker_daemon_config_file }}'
ansible.builtin.copy:
content: "{{ docker_daemon_config | to_nice_json }}"
dest: "{{ docker_daemon_config_file }}"
mode: "{{ docker_daemon_config_file_mode }}"
owner: "{{ docker_daemon_config_owner }}"
group: "{{ docker_daemon_config_group }}"
when: docker_daemon_config | string | length > 0
notify: docker-restart

@ -0,0 +1,30 @@
---
- name: Ensure legacy docker packages are removed
ansible.builtin.apt:
name: "{{ docker_apt_legacy_packages }}"
state: absent
when: docker_remove_legacy_packages
- name: Add apt key for docker repository
ansible.builtin.apt_key:
id: "{{ docker_apt_key_id }}"
url: "{{ docker_apt_key_url }}"
state: "{{ docker_state }}"
- name: Add apt repository for docker
ansible.builtin.apt_repository:
repo: "{{ docker_apt_repository }}"
state: "{{ docker_state }}"
register: docker_apt_repository_info
- name: Update apt cache if repository was newly added
ansible.builtin.apt:
update_cache: true
when:
- docker_state == 'present'
- docker_apt_repository_info.changed
- name: Install apt packages for docker
ansible.builtin.apt:
name: "{{ docker_apt_packages }}"
state: "{{ docker_state }}"

@ -0,0 +1,21 @@
---
- name: Ensure legacy docker packages are removed
ansible.builtin.dnf:
name: "{{ docker_fedora_legacy_packages }}"
state: "removed"
when: docker_remove_legacy_packages
- name: Ensure dnf repository for docker is {{ docker_state }}
ansible.builtin.yum_repository:
name: "{{ docker_fedora_repo_name }}"
description: "{{ docker_fedora_repo_description }}"
baseurl: "{{ docker_fedora_repo_url }}"
validate_certs: "{{ docker_fedora_repo_validate_certs }}"
gpgkey: "{{ docker_fedora_repo_gpg_key }}"
gpgcheck: "{{ docker_fedora_repo_gpg_check }}"
state: "{{ docker_state }}"
- name: Install dnf packages for docker
ansible.builtin.dnf:
name: "{{ docker_fedora_packages }}"
state: "{{ docker_state }}"

@ -0,0 +1,29 @@
---
- name: Check if target OS is supported
ansible.builtin.fail:
msg: >-2
OS '{{ docker_os }}' is not supported!
when: docker_os not in docker_supported_oses
vars:
docker_os: "{{ ansible_distribution | lower }}"
- name: Ensure docker is {{ docker_state }} on {{ ansible_distribution }}
ansible.builtin.include_tasks:
file: "install-{{ ansible_distribution | lower }}.yml"
- name: Configure docker daemon
ansible.builtin.include_tasks:
file: "configure.yml"
when: docker_state == 'present'
- name: Ensure docker daemon is {{ docker_systemd_service_enabled | ternary('enabled', 'disabled') }}
ansible.builtin.systemd_service:
name: "{{ docker_systemd_service_name }}"
enabled: "{{ docker_systemd_service_enabled }}"
when: ansible_facts['service_mgr'] == 'systemd'
- name: Ensure docker daemon is {{ docker_systemd_service_state }}
ansible.builtin.systemd_service:
name: "{{ docker_systemd_service_name }}"
state: "{{ docker_systemd_service_state }}"
when: ansible_facts['service_mgr'] == 'systemd'

@ -0,0 +1,4 @@
---
docker_supported_oses:
- 'debian'
- 'fedora'

@ -1,6 +1,6 @@
---
lego_user: "lego"
lego_version: "4.22.2"
lego_version: "4.23.0"
lego_instance: default
lego_base_path: "/opt/lego"
lego_cert_user: "acme-{{ lego_instance }}"

@ -25,35 +25,44 @@
- "{{ lego_cert_group }}"
append: true
- name: Ensure lego is installed
block:
- name: Check if lego is present
ansible.builtin.command:
cmd: which lego
changed_when: false
failed_when: false
register: lego_binary_info
- name: Check if lego is present
ansible.builtin.command:
cmd: which lego
changed_when: false
failed_when: false
register: lego_binary_info
check_mode: false
- name: Check which version of lego is present
ansible.builtin.command:
cmd: "lego --version"
changed_when: false
failed_when: false
register: lego_binary_version_info
when: lego_binary_info.rc == 0
check_mode: false
- name: Ensure lego is installed
when: (lego_binary_info.rc != 0) or (lego_version not in lego_binary_version_info.stdout)
block:
- name: Download lego from source
ansible.builtin.get_url:
url: "{{ lego_release_archive_url }}"
url_username: "{{ lego_release_archive_url_username | default(omit) }}"
url_password: "{{ lego_release_archive_url_password | default(omit) }}"
dest: "{{ lego_release_archive_file_path }}"
when: lego_binary_info.rc != 0
- name: Create folder to uncompress into
ansible.builtin.file:
dest: "{{ lego_release_archive_path }}"
state: directory
when: lego_binary_info.rc != 0
- name: Uncompress lego source archive
ansible.builtin.unarchive:
src: "{{ lego_release_archive_file_path }}"
dest: "{{ lego_release_archive_path }}"
remote_src: true
when: lego_binary_info.rc != 0
ignore_errors: "{{ ansible_check_mode }}"
- name: Ensure lego binary is present in PATH
ansible.builtin.copy:
@ -61,14 +70,7 @@
dest: "/usr/local/bin/lego"
mode: "u+rwx,g+rx,o+rx"
remote_src: true
when: lego_binary_info.rc != 0
- name: Ensure lego is allowed to bind to ports < 1024
community.general.capabilities:
path: "/usr/local/bin/lego"
capability: "cap_net_bind_service+ep"
state: present
when: lego_binary_allow_net_bind_service
ignore_errors: "{{ ansible_check_mode }}"
- name: Ensure intermediate data is gone
ansible.builtin.file:
@ -77,7 +79,13 @@
loop:
- "{{ lego_release_archive_path }}"
- "{{ lego_release_archive_file_path }}"
when: lego_binary_info.rc != 0
- name: Ensure lego is allowed to bind to ports < 1024
community.general.capabilities:
path: "/usr/local/bin/lego"
capability: "cap_net_bind_service+ep"
state: present
when: lego_binary_allow_net_bind_service
- name: Ensure lego base path exists
ansible.builtin.file:
@ -99,6 +107,7 @@
{{ entry.key }}={{ entry.value }}
{% endfor %}
dest: "{{ lego_base_path }}/{{ lego_instance }}.conf"
register: lego_env_file_info
- name: Ensure timer unit is templated
ansible.builtin.template:
@ -112,6 +121,7 @@
src: "lego_run.sh"
dest: "{{ lego_base_path }}/run.sh"
mode: "0755"
register: lego_handler_script_info
- name: Ensure per-instance base path is created
ansible.builtin.file:
@ -151,7 +161,18 @@
name: "{{ lego_systemd_timer_name }}"
state: "started"
- name: Check if certificates are present
ansible.builtin.find:
path: "{{ lego_instance_path }}/certificates"
recurse: false
file_type: "file"
register: lego_certificate_info
- name: Ensure systemd service is started once to obtain the certificate
ansible.builtin.systemd_service:
name: "{{ lego_systemd_service_name }}"
state: "started"
when: >-2
lego_handler_script_info.changed
or lego_env_file_info.changed
or lego_certificate_info.files | default([]) | length == 0

@ -1,17 +1,7 @@
---
minio_user: ~
minio_data_path: /opt/minio
minio_create_user: false
minio_manage_host_filesystem: false
minio_root_username: root
minio_root_password: ~
minio_container_name: minio
minio_container_image_name: docker.io/minio/minio
minio_container_image_tag: latest
minio_container_image_name: "docker.io/minio/minio"
minio_container_image_tag: "RELEASE.2025-04-22T22-12-26Z"
minio_container_image: "{{ minio_container_image_name }}:{{ minio_container_image_tag }}"
minio_container_networks: []
minio_container_ports: []
@ -34,6 +24,8 @@ minio_container_command:
- ":{{ minio_container_listen_port_console }}"
minio_container_restart_policy: "unless-stopped"
minio_container_image_force_source: "{{ (minio_container_image_tag == 'latest')|bool }}"
minio_container_state: >-2
{{ (minio_state == 'present') | ternary('started', 'absent') }}
minio_container_listen_port_api: 9000
minio_container_listen_port_console: 8900

@ -0,0 +1,12 @@
---
minio_user: ~
minio_data_path: /opt/minio
minio_create_user: false
minio_manage_host_filesystem: false
minio_root_username: root
minio_root_password: ~
minio_state: present
minio_deployment_method: docker
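
Since these two variables are validated against `minio_states` and `minio_deployment_methods` further down in the diff, one possible override for decommissioning an instance simply flips the state; the container state then resolves to `absent` via `minio_container_state`.

```yaml
# Illustrative override: tear the minio deployment down again
minio_state: absent
minio_deployment_method: docker   # currently the only supported method
```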

@ -0,0 +1,29 @@
---
- name: Ensure filesystem mounts ({{ minio_data_path }}) for container volumes are present
ansible.builtin.file:
path: "{{ minio_data_path }}"
state: directory
user: "{{ minio_user|default(omit, True) }}"
group: "{{ minio_user|default(omit, True) }}"
when: minio_manage_host_filesystem
- name: Ensure container image '{{ minio_container_image }}' is {{ minio_state }}
community.docker.docker_image:
name: "{{ minio_container_image }}"
state: "{{ minio_state }}"
source: pull
force_source: "{{ minio_container_image_force_source }}"
- name: Ensure container '{{ minio_container_name }}' is {{ minio_container_state }}
community.docker.docker_container:
name: "{{ minio_container_name }}"
image: "{{ minio_container_image }}"
volumes: "{{ minio_container_volumes }}"
env: "{{ minio_container_env }}"
labels: "{{ minio_container_labels }}"
networks: "{{ minio_container_networks }}"
ports: "{{ minio_container_ports }}"
user: "{{ minio_user|default(omit, True) }}"
command: "{{ minio_container_command }}"
restart_policy: "{{ minio_container_restart_policy }}"
state: "{{ minio_container_state }}"

@ -1,37 +1,25 @@
---
- name: Ensure 'minio_state' is valid
ansible.builtin.fail:
msg: >-
Unsupported state '{{ minio_state }}'!
Supported states are {{ minio_states | join(', ') }}.
when: minio_state not in minio_states
- name: Ensure minio run user is present
user:
- name: Ensure 'minio_deployment_method' is valid
ansible.builtin.fail:
msg: >-
Unsupported deployment method '{{ minio_deployment_method }}'!
Supported methods are {{ minio_deployment_methods | join(', ') }}.
when: minio_deployment_method not in minio_deployment_methods
- name: Ensure minio run user is {{ minio_state }}
ansible.builtin.user:
name: "{{ minio_user }}"
state: present
system: yes
state: "{{ minio_state }}"
system: true
when: minio_create_user
- name: Ensure filesystem mounts ({{ minio_data_path }}) for container volumes are present
file:
path: "{{ minio_data_path }}"
state: directory
user: "{{ minio_user|default(omit, True) }}"
group: "{{ minio_user|default(omit, True) }}"
when: minio_manage_host_filesystem
- name: Ensure container image for minio is present
community.docker.docker_image:
name: "{{ minio_container_image }}"
state: present
source: pull
force_source: "{{ minio_container_image_force_source }}"
- name: Ensure container {{ minio_container_name }} is running
docker_container:
name: "{{ minio_container_name }}"
image: "{{ minio_container_image }}"
volumes: "{{ minio_container_volumes }}"
env: "{{ minio_container_env }}"
labels: "{{ minio_container_labels }}"
networks: "{{ minio_container_networks }}"
ports: "{{ minio_container_ports }}"
user: "{{ minio_user|default(omit, True) }}"
command: "{{ minio_container_command }}"
restart_policy: "{{ minio_container_restart_policy }}"
state: started
- name: Deploy minio using {{ minio_deployment_method }}
ansible.builtin.include_tasks:
file: "deploy-{{ minio_deployment_method }}.yml"

@ -1,5 +1,9 @@
---
minio_states:
- present
- absent
minio_deployment_methods:
- docker
minio_container_volumes: "{{ minio_container_base_volumes + minio_container_extra_volumes }}"
minio_container_env: "{{ minio_container_base_env | combine(minio_container_extra_env) }}"

roles/mosh/README.md (new file, 4 lines)

@ -0,0 +1,4 @@
# `finallycoffee.base.mosh`
Installs [`mosh`](https://mosh.org/#), a remote 'mobile shell' which supports
roaming and re-uses SSH for the authentication layer.

@ -0,0 +1,2 @@
---
mosh_state: present

@ -0,0 +1,15 @@
---
mosh_debian_packages:
- "mosh"
- "openssh-server"
mosh_fedora_packages:
- "mosh"
- "openssh-server"
mosh_archlinux_packages:
- "mosh"
- "openssh"
mosh_packages:
debian: "{{ mosh_debian_packages }}"
fedora: "{{ mosh_fedora_packages }}"
archlinux: "{{ mosh_archlinux_packages }}"

@ -0,0 +1,30 @@
---
- name: Ensure mosh is {{ mosh_state }} (dnf)
ansible.builtin.dnf:
name: "{{ mosh_packages[_key] }}"
state: "{{ mosh_state }}"
when:
- ansible_facts['pkg_mgr'] in ['dnf', 'dnf5']
- _key in mosh_packages.keys()
vars:
_key: "{{ ansible_distribution | lower }}"
- name: Ensure mosh is {{ mosh_state }} (apt)
ansible.builtin.apt:
package: "{{ mosh_packages[_key] }}"
state: "{{ mosh_state }}"
when:
- ansible_facts['pkg_mgr'] in ['apt']
- _key in mosh_packages.keys()
vars:
_key: "{{ ansible_distribution | lower }}"
- name: Ensure mosh is {{ mosh_state }} (pacman)
community.general.pacman:
name: "{{ mosh_packages[_key] }}"
state: "{{ mosh_state }}"
when:
- ansible_facts['pkg_mgr'] in ['pacman']
- _key in mosh_packages.keys()
vars:
_key: "{{ ansible_distribution | lower }}"

roles/mosh/tasks/main.yml (new file, 11 lines)

@ -0,0 +1,11 @@
---
- name: Ensure 'mosh_state' is valid
ansible.builtin.fail:
msg: >-2
Invalid state '{{ mosh_state }}' for 'mosh_state'!
Allowed states are {{ mosh_states | join(', ') }}.
when: mosh_state not in mosh_states
- name: Ensure mosh is {{ mosh_state }}
ansible.builtin.include_tasks:
file: "install.yml"

roles/mosh/vars/main.yml (new file, 4 lines)

@ -0,0 +1,4 @@
---
mosh_states:
- "present"
- "absent"

@ -1,5 +1,5 @@
---
nginx_version: "1.27.4"
nginx_version: "1.28.0"
nginx_flavour: alpine
nginx_base_path: /opt/nginx
nginx_config_file: "{{ nginx_base_path }}/nginx.conf"

roles/openssh/README.md (new file, 13 lines)

@ -0,0 +1,13 @@
# `finallycoffee.base.openssh`
Ansible role to manage and configure openssh and its components (like `sshd`).
Currently supports the `fedora` and `debian` Linux distributions.
## `sshd`
To configure `sshd`, see the [`defaults/main/sshd.yml`](defaults/main/sshd.yml),
where snake\_cased config keys for `/etc/ssh/sshd_config` are available in
the `openssh_sshd_config_` namespace.
To add your own config on top, simply use key-value syntax in `openssh_sshd_config`.
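
An illustrative `openssh_sshd_config` override (the keys are standard `sshd_config` directives; the values are examples, not role defaults). Booleans are rendered as `yes`/`no` by the configure task shown later in this diff.

```yaml
openssh_sshd_config:
  PermitRootLogin: false
  MaxSessions: 4
  AllowTcpForwarding: true
```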

@ -0,0 +1,3 @@
---
openssh_state: 'present'
openssh_sshd_config_file: "/etc/ssh/sshd_config"

@ -0,0 +1,8 @@
---
openssh_packages:
fedora: "{{ openssh_fedora_packages }}"
debian: "{{ openssh_debian_packages }}"
openssh_fedora_packages:
- "openssh-server"
openssh_debian_packages:
- "openssh-server"

@ -0,0 +1,33 @@
---
openssh_sshd_enable: true
openssh_sshd_config_pubkey_authentication: true
openssh_sshd_config_password_authentication: false
openssh_sshd_config_challenge_response_authentication: false
openssh_sshd_config_permit_root_login: false
# Limits
openssh_sshd_config_max_sessions: ~
openssh_sshd_config_max_startups: ~
# Hardening
openssh_sshd_config_protocol: 2
openssh_sshd_config_x11_forwarding: false
openssh_sshd_config_allow_agent_forwarding: false
openssh_sshd_config_allow_tcp_forwarding: false
openssh_sshd_default_config:
PubkeyAuthentication: "{{ openssh_sshd_config_pubkey_authentication }}"
PasswordAuthentication: "{{ openssh_sshd_config_password_authentication }}"
ChallengeResponseAuthentication: >-2
{{ openssh_sshd_config_challenge_response_authentication }}
PermitRootLogin: "{{ openssh_sshd_config_permit_root_login }}"
MaxSessions: "{{ openssh_sshd_config_max_sessions }}"
MaxStartups: "{{ openssh_sshd_config_max_startups }}"
Protocol: "{{ openssh_sshd_config_protocol }}"
X11Forwarding: "{{ openssh_sshd_config_x11_forwarding }}"
AllowAgentForwarding: "{{ openssh_sshd_config_allow_agent_forwarding }}"
AllowTcpForwarding: "{{ openssh_sshd_config_allow_tcp_forwarding }}"
openssh_sshd_merged_config: >-2
{{ openssh_sshd_default_config | default({}, true)
| combine(openssh_sshd_config | default({}, true)) }}

@ -0,0 +1,2 @@
---
openssh_sshd_systemd_service_name: "sshd.service"

@ -0,0 +1,7 @@
---
- name: Ensure sshd is reloaded
ansible.builtin.systemd_service:
name: "{{ openssh_sshd_systemd_service_name }}"
state: "reloaded"
when: ansible_facts['service_mgr'] == 'systemd'
listen: openssh_sshd_reload

@ -0,0 +1,28 @@
---
- name: Configure sshd
ansible.builtin.lineinfile:
path: "{{ openssh_sshd_config_file }}"
regexp: "{{ openssh_sshd_config_regexp }}"
line: "{{ openssh_sshd_config_line }}"
firstmatch: true
state: present
validate: "sshd -Tf %s"
loop: "{{ openssh_sshd_merged_config | dict2items }}"
loop_control:
loop_var: "tuple"
label: "{{ tuple.key }}"
notify:
- openssh_sshd_reload
vars:
openssh_sshd_config_regexp: "^\\s*#?\\s*{{ tuple.key }}"
openssh_sshd_config_line: >-2
{{ openssh_sshd_config_line_commented }}{{ tuple.key }} {{ openssh_sshd_config_value }}
openssh_sshd_config_value_is_none: "{{ tuple.value is none }}"
openssh_sshd_config_line_commented: >-2
{{ openssh_sshd_config_value_is_none | ternary('#', '') }}
openssh_sshd_config_value: >-2
{{ (tuple.value is boolean) | ternary(
tuple.value | ternary('yes', 'no'),
tuple.value
)
}}

@ -0,0 +1,16 @@
---
- name: Ensure openssh server package is {{ openssh_state }} (dnf)
ansible.builtin.dnf:
name: "{{ openssh_packages[ansible_distribution | lower] }}"
state: "{{ openssh_state }}"
when:
- ansible_facts['pkg_mgr'] in ['dnf', 'dnf5']
- ansible_distribution | lower in openssh_packages.keys()
- name: Ensure openssh server package is {{ openssh_state }} (apt)
ansible.builtin.apt:
package: "{{ openssh_packages[ansible_distribution | lower] }}"
state: "{{ openssh_state }}"
when:
- ansible_facts['pkg_mgr'] in ['apt']
- ansible_distribution | lower in openssh_packages.keys()

@ -0,0 +1,15 @@
---
- name: Ensure 'openssh_state' is valid
ansible.builtin.fail:
msg: >-2
Invalid value '{{ openssh_state }}' for 'openssh_state'.
Valid values are {{ openssh_states | join(', ') }}!
when: openssh_state not in openssh_states
- name: Ensure openssh is {{ openssh_state }}
ansible.builtin.include_tasks:
file: "install.yml"
- name: Ensure sshd is configured
ansible.builtin.include_tasks:
file: "configure-sshd.yml"

@ -0,0 +1,4 @@
---
openssh_states:
- "present"
- "absent"

@ -1,4 +1,5 @@
---
restic_repo_url: ~
restic_repo_password: ~
restic_s3_key_id: ~
@ -7,8 +8,6 @@ restic_s3_access_key: ~
restic_backup_paths: []
restic_backup_stdin_command: ~
restic_backup_stdin_command_filename: ~
restic_backup_generate_metrics_command: >-2
{{ restic_script_generate_snapshot_metrics }}
restic_policy_keep_all_within: 1d
restic_policy_keep_hourly: 12
@ -19,16 +18,13 @@ restic_policy_keep_yearly: 5
restic_policy_backup_frequency: hourly
restic_base_environment:
RESTIC_REPOSITORY: "{{ restic_repo_url }}"
RESTIC_PASSWORD: "{{ restic_repo_password }}"
RESTIC_JOBNAME: "{{ restic_job_name }}"
RESTIC_JOBNAME: "{{ restic_job_name | default('unknown') }}"
RESTIC_FORGET_KEEP_WITHIN: "{{ restic_policy_keep_all_within }}"
RESTIC_FORGET_KEEP_HOURLY: "{{ restic_policy_keep_hourly }}"
RESTIC_FORGET_KEEP_DAILY: "{{ restic_policy_keep_daily }}"
RESTIC_FORGET_KEEP_WEEKLY: "{{ restic_policy_keep_weekly }}"
RESTIC_FORGET_KEEP_MONTHLY: "{{ restic_policy_keep_monthly }}"
RESTIC_FORGET_KEEP_YEARLY: "{{ restic_policy_keep_yearly }}"
RESTIC_GENERATE_SNAPSHOT_METRICS_COMMAND: "{{ restic_backup_generate_metrics_command }}"
restic_s3_environment:
AWS_ACCESS_KEY_ID: "{{ restic_s3_key_id }}"
@ -37,8 +33,8 @@ restic_s3_environment:
restic_complete_environment: >-
{{
restic_base_environment
| combine((restic_s3_environment | default({}))
if (restic_s3_key_id and restic_s3_access_key) else {})
| combine((restic_s3_environment
if (restic_s3_key_id and restic_s3_access_key) else {}) | default({}))
| combine(restic_environment | default({}))
}}
@ -50,3 +46,15 @@ restic_policy:
monthly: "{{ restic_policy_keep_monthly }}"
yearly: "{{ restic_policy_keep_yearly }}"
frequency: "{{ restic_policy_backup_frequency }}"
restic_user: root
restic_create_user: false
restic_start_job_on_unit_change: false
restic_job_name: ~
restic_job_description: "Restic backup job for {{ restic_job_name }}"
restic_systemd_unit_naming_scheme: "restic.{{ restic_job_name }}"
restic_systemd_working_directory: /tmp
restic_systemd_syslog_identifier: "restic-{{ restic_job_name }}"
restic_package_name: restic

@ -1,16 +0,0 @@
---
restic_user: root
restic_user_create: false
restic_create_user: "{{ restic_user_create }}"
restic_user_create_home: false
restic_user_system: false
restic_state: present
restic_version: "0.17.3"
restic_job_name: default
restic_job_state: "{{ restic_state }}"
restic_job_directory: "/etc/restic"
restic_package_name: restic
restic_script_generate_snapshot_metrics: "/opt/restic-generate-snapshot-metrics.sh"
restic_start_job_on_unit_change: true

@ -1,20 +0,0 @@
---
restic_systemd_job_description: "Restic backup service"
restic_systemd_unit_naming_scheme: "restic-{{ restic_job_name }}"
restic_systemd_timer_naming_scheme: >-2
{{ restic_systemd_unit_naming_scheme }}.timer
restic_systemd_timer_state_map:
present: "started"
absent: "stopped"
masked: "started"
restic_systemd_timer_state: >-2
{{ restic_systemd_timer_state_map[restic_job_state] }}
restic_systemd_syslog_identifier: "restic@%i"
restic_systemd_working_directory: /tmp
restic_systemd_install_wanted_by: "basic.target"
restic_systemd_install_default_instance: "default"
restic_systemd_start_job_on_unit_change: false
restic_systemd_service_exec_start: "/opt/restic-backup.sh"

@ -6,21 +6,17 @@ if [[ -n ${RESTIC_PRE_BACKUP_HOOK-} ]]; then
/bin/bash -c "$RESTIC_PRE_BACKUP_HOOK"
fi
echo "List existing snapshots or initialize repository"
restic snapshots || restic init
echo "List existing snapshots or attempt to initialize/unlock repository"
restic snapshots || restic init || restic unlock
sleep 1;
echo "Attempting to remove lock if present"
restic unlock
sleep 1;
sleep 2
echo "Start backup on ${@:1}"
restic --verbose --retry-lock=${RESTIC_RETRY_LOCK:-5m} backup "${@:1}"
sleep 1;
if [[ -n ${RESTIC_POST_BACKUP_HOOK-} ]]; then
/bin/bash -c "$RESTIC_POST_BACKUP_HOOK"
fi
sleep 2
echo "Forget and prune old snapshots"
restic forget --prune --retry-lock=${RESTIC_RETRY_LOCK:-5m} \
@ -33,11 +29,9 @@ restic forget --prune --retry-lock=${RESTIC_RETRY_LOCK:-5m} \
sleep 2
echo "Generate snapshot metrics"
if [[ -n ${RESTIC_GENERATE_SNAPSHOT_METRICS_COMMAND-} ]]; then
restic --json snapshots | ${RESTIC_GENERATE_SNAPSHOT_METRICS_COMMAND} \
> /var/lib/node_exporter/restic-snapshots-${RESTIC_JOBNAME:-unknown}.prom-src
sleep 2;
fi
restic --json snapshots | /opt/restic-generate-snapshot-metrics.sh \
> /var/lib/node_exporter/restic-snapshots-${RESTIC_JOBNAME:-unknown}.prom-src
sleep 2
echo "Check repository"
restic check

@ -6,7 +6,7 @@ echo $RESTIC_JSON | jq -r '.[]
| {
"hostname": .hostname,
"username": .username,
"short_id": .short_id,
"short_id": .short_id,
"time": ((((.time | split(".")[0]) + "Z") | fromdate) - (3600 * (.time | split("+")[1] | split(":")[0] | tonumber + 1))),
"paths": .paths[]
} | "restic_snapshots{hostname=\"\(.hostname)\",username=\"\(.username)\",short_id=\"\(.short_id)\",paths=\"\(.paths)\"} \(.time)"'

@ -1,12 +1,13 @@
---
- name: Ensure system daemon is reloaded
listen: reload-systemd
ansible.builtin.systemd:
systemd:
daemon_reload: true
- name: Ensure systemd service for '{{ restic_job_name }}' is started immediately
listen: trigger-restic
ansible.builtin.systemd:
name: "{{ restic_systemd_timer_naming_scheme }}"
systemd:
name: "{{ restic_systemd_unit_naming_scheme }}.service"
state: started
when: (not ansible_check_mode) and restic_start_job_on_unit_change
when: restic_start_job_on_unit_change

@ -1,35 +0,0 @@
---
- name: Check if 'restic_state' is valid
ansible.builtin.fail:
msg: >-2
Unknown value '{{ restic_state }}' for 'restic_state'!
Supported values are {{ restic_states | join(', ') }}
when: restic_state not in restic_states
- name: Ensure 'restic_job_name' is properly populated
ansible.builtin.fail:
msg: >-2
Unsupported restic_job_name '{{ restic_job_name | string }}'!
when:
- not (restic_job_name | string | length > 0)
- name: Ensure either backup_paths or backup_stdin_command is populated
ansible.builtin.fail:
msg: >-2
Setting both `restic_backup_paths` and `restic_backup_stdin_command`
is not supported!
when: restic_backup_paths|length > 0 and restic_backup_stdin_command and false
- name: Ensure a filename for stdin_command backup is given
ansible.builtin.fail:
msg: >-2
`restic_backup_stdin_command` was set but no filename for the resulting
output was supplied in `restic_backup_stdin_command_filename`.
when: restic_backup_stdin_command and not restic_backup_stdin_command_filename
- name: Ensure backup frequency adheres to systemd's OnCalender syntax
command:
cmd: "systemd-analyze calendar {{ restic_policy.frequency }}"
register: systemd_calender_parse_res
failed_when: systemd_calender_parse_res.rc != 0
changed_when: false

@ -1,30 +0,0 @@
---
- name: Ensure systemd timer file for '{{ restic_job_name }}' is {{ restic_state }}'
ansible.builtin.template:
dest: "/etc/systemd/system/{{ restic_systemd_unit_naming_scheme }}.timer"
src: restic.timer.j2
owner: root
group: root
mode: "0640"
when: restic_state == 'present'
register: restic_systemd_timer_info
notify:
- reload-systemd
- name: Ensure restic configuration for '{{ restic_job_name }}' is {{ restic_job_state }}
ansible.builtin.template:
src: "restic.conf.j2"
dest: "{{ restic_job_directory }}/{{ restic_job_name }}.conf"
mode: "0640"
when: restic_job_state in ['present', 'masked']
notify:
- trigger-restic
- name: Ensure restic configuration for '{{ restic_job_name }}' is {{ restic_job_state }}
ansible.builtin.file:
path: "{{ restic_job_directory }}/{{ restic_job_name }}.conf"
state: "{{ restic_job_state }}"
when: restic_job_state not in ['present', 'masked']
- name: Flush handlers to ensure systemd knows about '{{ restic_job_name }}'
meta: flush_handlers

@ -1,48 +0,0 @@
---
- name: Ensure restic is installed
block:
- name: Ensure restic is installed via apt
apt:
package: restic
state: latest
when: ansible_os_family == 'Debian'
- name: Ensure restic is installed via dnf
dnf:
name: restic
state: latest
when: ansible_os_family == 'RedHat'
- name: Ensure restic is installed using the auto-detected package-manager
package:
name: "{{ restic_package_name }}"
state: present
when: ansible_os_family not in ['RedHat', 'Debian']
- name: Ensure restic backup scripts are {{ restic_state }}
ansible.builtin.copy:
src: "{{ script.source }}"
dest: "{{ script.destination }}"
mode: "{{ script.mode }}"
loop:
- source: restic-backup.sh
destination: "{{ restic_systemd_service_exec_start }}"
mode: "0510"
- source: restic-snapshot-metrics.sh
destination: "{{ restic_script_generate_snapshot_metrics }}"
mode: "0510"
loop_control:
loop_var: script
label: "{{ script.source }}"
- name: Ensure systemd service file for restic template unit is {{ restic_state }}
ansible.builtin.template:
dest: "/etc/systemd/system/restic@.service"
src: "restic@.service.j2"
owner: root
group: root
mode: "0640"
when: restic_state == 'present'
notify:
- reload-systemd
- trigger-restic

@ -1,39 +1,77 @@
---
- name: Check if role input is valid
ansible.builtin.include_tasks:
file: check.yml
- name: Ensure restic is {{ restic_state }}
ansible.builtin.include_tasks:
file: install.yml
- name: Ensure restic user '{{ restic_user }}' is {{ restic_state }}
ansible.builtin.user:
- name: Ensure {{ restic_user }} system user exists
user:
name: "{{ restic_user }}"
state: "{{ restic_state }}"
system: "{{ restic_user_system }}"
create_home: "{{ restic_user_create_home }}"
state: present
system: true
when: restic_create_user
- name: Ensure restic configuration for job is {{ restic_job_state }}
ansible.builtin.include_tasks:
file: "configure.yml"
- name: Ensure either backup_paths or backup_stdin_command is populated
when: restic_backup_paths|length > 0 and restic_backup_stdin_command and false
fail:
msg: "Setting both `restic_backup_paths` and `restic_backup_stdin_command` is not supported"
- name: Ensure a filename for stdin_command backup is given
when: restic_backup_stdin_command and not restic_backup_stdin_command_filename
fail:
msg: "`restic_backup_stdin_command` was set but no filename for the resulting output was supplied in `restic_backup_stdin_command_filename`"
- name: Ensure backup frequency adheres to systemd's OnCalendar syntax
command:
cmd: "systemd-analyze calendar {{ restic_policy.frequency }}"
register: systemd_calender_parse_res
failed_when: systemd_calender_parse_res.rc != 0
changed_when: false
- name: Ensure restic is installed
block:
- name: Ensure restic is installed via apt
apt:
package: restic
state: latest
when: ansible_os_family == 'Debian'
- name: Ensure restic is installed via dnf
dnf:
name: restic
state: latest
when: ansible_os_family == 'RedHat'
- name: Ensure restic is installed using the auto-detected package-manager
package:
name: "{{ restic_package_name }}"
state: present
when: ansible_os_family not in ['RedHat', 'Debian']
- name: Ensure systemd service file for '{{ restic_job_name }}' is templated
template:
dest: "/etc/systemd/system/{{ restic_systemd_unit_naming_scheme }}.service"
src: restic.service.j2
owner: root
group: root
mode: 0640
notify:
- reload-systemd
- trigger-restic
- name: Ensure systemd service file for '{{ restic_job_name }}' is templated
template:
dest: "/etc/systemd/system/{{ restic_systemd_unit_naming_scheme }}.timer"
src: restic.timer.j2
owner: root
group: root
mode: 0640
notify:
- reload-systemd
- name: Flush handlers to ensure systemd knows about '{{ restic_job_name }}'
meta: flush_handlers
- name: Ensure systemd timer for '{{ restic_job_name }}' is activated
ansible.builtin.systemd:
name: "{{ restic_systemd_timer_naming_scheme }}"
systemd:
name: "{{ restic_systemd_unit_naming_scheme }}.timer"
enabled: true
when:
- restic_systemd_timer_info.changed
- not restic_systemd_timer_info.failed
- not ansible_check_mode
- name: Ensure systemd timer for '{{ restic_job_name }}' is {{ restic_job_state }}
ansible.builtin.systemd:
name: "{{ restic_systemd_timer_naming_scheme }}"
state: "{{ restic_job_state }}"
masked: "{{ (restic_job_state == 'masked') | ternary('true', omit) }}"
when:
- restic_systemd_timer_info.changed
- not restic_systemd_timer_info.failed
- not ansible_check_mode
- name: Ensure systemd timer for '{{ restic_job_name }}' is started
systemd:
name: "{{ restic_systemd_unit_naming_scheme }}.timer"
state: started

@ -1,3 +0,0 @@
{% for kv in restic_complete_environment | dict2items %}
{{ kv.key }}={{ kv.value }}
{% endfor %}

@ -0,0 +1,51 @@
[Unit]
Description={{ restic_job_description }}
[Service]
Type=simple
User={{ restic_user }}
WorkingDirectory={{ restic_systemd_working_directory }}
SyslogIdentifier={{ restic_systemd_syslog_identifier }}
Environment=RESTIC_REPOSITORY={{ restic_repo_url }}
Environment=RESTIC_PASSWORD={{ restic_repo_password }}
{% for kv in restic_complete_environment | dict2items %}
Environment={{ kv.key }}={{ kv.value }}
{% endfor %}
{% if restic_init | default(true) %}
ExecStartPre=-/bin/sh -c '/usr/bin/restic snapshots || /usr/bin/restic init'
{% endif %}
{% if restic_unlock_before_backup | default(false) %}
ExecStartPre=-/bin/sh -c 'sleep 3 && /usr/bin/restic unlock'
{% endif %}
{% if restic_backup_pre_hook | default(false) %}
ExecStartPre=-{{ restic_backup_pre_hook }}
{% endif %}
{% if restic_backup_stdin_command %}
ExecStart=/bin/sh -c '{{ restic_backup_stdin_command }} | /usr/bin/restic backup \
--retry-lock {{ restic_retry_lock | default('5m') }} \
--verbose --stdin \
--stdin-filename {{ restic_backup_stdin_command_filename }}'
{% else %}
ExecStart=/opt/restic-backup-directories.sh {{ restic_backup_paths | join(' ') }}
{% endif %}
{% if restic_forget_prune | default(true) %}
ExecStartPost=/usr/bin/restic forget --prune \
--retry-lock {{ restic_retry_lock | default('5m') }} \
--keep-within={{ restic_policy.keep_within }} \
--keep-hourly={{ restic_policy.hourly }} \
--keep-daily={{ restic_policy.daily }} \
--keep-weekly={{ restic_policy.weekly }} \
--keep-monthly={{ restic_policy.monthly }} \
--keep-yearly={{ restic_policy.yearly }}
{% endif %}
{% if restic_list_snapshots | default(true) %}
ExecStartPost=-/usr/bin/restic snapshots --retry-lock {{ restic_retry_lock | default('5m') }}
{% endif %}
{% if restic_backup_post_hook | default(false) %}
ExecStartPost=-{{ restic_backup_post_hook }}
{% endif %}
{% if restic_check | default(true) %}
ExecStartPost=/usr/bin/restic check --retry-lock {{ restic_retry_lock | default('5m') }}
{% endif %}

@ -1,15 +0,0 @@
[Unit]
Description={{ restic_systemd_job_description }}
[Service]
Type=simple
EnvironmentFile={{ restic_job_directory }}/%i.conf
User={{ restic_user }}
WorkingDirectory={{ restic_systemd_working_directory }}
SyslogIdentifier={{ restic_systemd_syslog_identifier }}
ExecStart={{ restic_systemd_service_exec_start }}
[Install]
WantedBy={{ restic_systemd_install_wanted_by }}
DefaultInstance={{ restic_systemd_install_default_instance }}

@ -1,9 +0,0 @@
---
restic_states:
- "present"
- "absent"
restic_job_states:
- "present"
- "masked"
- "absent"

roles/user/README.md (new file, 23 lines)

@ -0,0 +1,23 @@
# `finallycoffee.base.user` ansible role
Provision and manage user accounts on the remote host. Supports setting user
home, gecos (display name) and shell.
Warning: if the user's home exists and is changed, the role will attempt to
move the home directory. Set `move_home` to false on the user to disable this
behaviour.
## Examples
```yaml
- hosts: all
roles:
- role: finallycoffee.base.user
vars:
users:
- name: root
- name: alice
- name: bob
state: present
- name: eve
state: absent
```
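
The role also manages SSH `authorized_keys` per user (see `configure-user.yml` below); a rough sketch of that structure, using the fields referenced there, with placeholder key material:

```yaml
users:
  - name: alice
    shell: /bin/bash
    authorized_keys:
      - type: ssh-ed25519
        key: "AAAAC3...example"   # placeholder public key
        comment: workstation
```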

@ -0,0 +1,2 @@
---
users: []

@ -0,0 +1,41 @@
---
- name: Ensure user '{{ user.name }}' is {{ user_state }}
ansible.builtin.user:
name: "{{ user.name }}"
state: "{{ user_state }}"
system: "{{ user.system | default(false, true) }}"
shell: "{{ user.shell | default(omit, true) }}"
home: "{{ user.home | default(omit, true) }}"
create_home: "{{ user.create_home | default(true, true) }}"
move_home: "{{ user.move_home | default(true, true) }}"
skeleton: >-2
{{ (user.create_home | default(true, true) and 'skeleton' in user)
| ternary(user.skeleton | default(''), omit) }}
comment: "{{ user.comment | default(user.gecos | default(omit, true), true) }}"
vars:
user_state: "{{ user.state | default('present', false) }}"
- name: Ensure SSH authorized keys for '{{ user.name }}' are {{ user_state }}
vars:
user_state: "{{ user.state | default('present', false) }}"
when:
- user_state == 'present'
- user.authorized_keys | default([]) | length > 0
block:
- name: Ensure .ssh directory for user '{{ user.name }}' exists
ansible.builtin.file:
path: "{{ user.home | default('/home/' + user.name) + '/.ssh' }}"
state: "directory"
owner: "{{ user.name }}"
group: "{{ user.name }}"
mode: "0700"
- name: Ensure key is up to date
ansible.posix.authorized_key:
user: "{{ user.name }}"
state: "{{ key.state | default('present', true) }}"
key: "{{ key.type }} {{ key.key }}"
comment: "{{ user.name }}-{{ key.comment }}"
loop: "{{ user.authorized_keys }}"
loop_control:
loop_var: key
label: "{{ user.name }}-{{ key.comment }}"

@ -0,0 +1,8 @@
---
- name: Ensure users are configured
ansible.builtin.include_tasks:
file: "configure-user.yml"
loop: "{{ users }}"
loop_control:
loop_var: user
label: "{{ user.name }}"