Compare commits

1 Commits

1fe626fad5
...

Author | SHA1 | Date
---|---|---
transcaffe | 5b47da2bd0 |
@@ -11,10 +11,6 @@ concise area of concern.
- [`roles/authelia`](roles/authelia/README.md): Deploys an [authelia.com](https://www.authelia.com)
  instance, an authentication provider with beta OIDC provider support.

- [`roles/elasticsearch`](roles/elasticsearch/README.md): Deploy [elasticsearch](https://www.docker.elastic.co/r/elasticsearch/elasticsearch-oss),
  a popular (distributed) search and analytics engine, mostly known as the
  letter "E" in the ELK stack.

- [`roles/gitea`](roles/gitea/README.md): Deploy [gitea.io](https://gitea.io), a
  lightweight, self-hosted git service.
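For orientation, a minimal playbook applying one of the roles listed above might look like the sketch below. The `finallycoffee.services` namespace is taken from the role README headings elsewhere in this diff; the host group and the overridden variable value are illustrative assumptions only.

```yaml
---
# Illustrative sketch: apply the gitea role from this collection to a host group.
# "git_servers" is a hypothetical inventory group; gitea_version is one of the
# role defaults shown further down in this diff.
- name: Deploy a lightweight git service
  hosts: git_servers
  become: true
  roles:
    - role: finallycoffee.services.gitea
      vars:
        gitea_version: "1.16.4"
```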
@@ -1,6 +1,6 @@
---

authelia_version: 4.36.4
authelia_version: 4.34.6
authelia_user: authelia
authelia_base_dir: /opt/authelia
authelia_domain: authelia.example.org
@@ -8,7 +8,6 @@ authelia_domain: authelia.example.org
authelia_config_dir: "{{ authelia_base_dir }}/config"
authelia_config_file: "{{ authelia_config_dir }}/config.yaml"
authelia_data_dir: "{{ authelia_base_dir }}/data"
authelia_asset_dir: "{{ authelia_base_dir }}/assets"
authelia_sqlite_storage_file: "{{ authelia_data_dir }}/authelia.sqlite3"
authelia_notification_storage_file: "{{ authelia_data_dir }}/notifications.txt"
authelia_user_storage_file: "{{ authelia_data_dir }}/user_database.yml"
@@ -43,7 +42,6 @@ authelia_config_default_redirection_url: ~
authelia_config_server_host: 0.0.0.0
authelia_config_server_port: "{{ authelia_container_listen_port }}"
authelia_config_server_path: ""
authelia_config_server_asset_path: "/config/assets/"
authelia_config_server_read_buffer_size: 4096
authelia_config_server_write_buffer_size: 4096
authelia_config_server_enable_pprof: true
@@ -57,8 +55,6 @@ authelia_config_log_level: info
authelia_config_log_format: json
authelia_config_log_file_path: ~
authelia_config_log_keep_stdout: false
authelia_config_telemetry_metrics_enabled: false
authelia_config_telemetry_metrics_address: '0.0.0.0:9959'
authelia_config_totp_disable: true
authelia_config_totp_issuer: "{{ authelia_domain }}"
authelia_config_totp_algorithm: sha1
@@ -80,8 +76,8 @@ authelia_config_ntp_version: 4
authelia_config_ntp_max_desync: 3s
authelia_config_ntp_disable_startup_check: false
authelia_config_ntp_disable_failure: false
authelia_config_authentication_backend_disable_reset_password: false
authelia_config_authentication_backend_refresh_interval: 5m
authelia_config_authentication_backend_password_reset_disable: false
authelia_config_authentication_backend_password_reset_custom_url: ~
authelia_config_authentication_backend_ldap_implementation: custom
authelia_config_authentication_backend_ldap_url: ldap://127.0.0.1:389
@@ -157,7 +153,7 @@ authelia_config_notifier_smtp_timeout: 5s
authelia_config_notifier_smtp_sender: "Authelia on {{ authelia_domain }} <admin@{{ authelia_domain }}>"
authelia_config_notifier_smtp_identifier: "{{ authelia_domain }}"
authelia_config_notifier_smtp_subject: "[Authelia @ {{ authelia_domain }}] {title}"
authelia_config_notifier_smtp_startup_check_address: "authelia-test@{{ authelia_domain }}"
authelia_config_notifier_smtp_startup_check_address: false
authelia_config_notifier_smtp_disable_require_tls: false
authelia_config_notifier_smtp_disable_html_emails: false
authelia_config_notifier_smtp_tls_skip_verify: false
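As a hedged example of how the role defaults shown above are typically overridden, the snippet below sets a few of them from a host_vars file; the values are placeholders, not recommendations.

```yaml
# host_vars/auth.example.org.yml (illustrative values only; variable names are
# the role defaults visible in this diff)
authelia_version: 4.34.6
authelia_domain: auth.example.org
authelia_config_log_level: debug
authelia_config_notifier_smtp_sender: "Authelia <admin@auth.example.org>"
```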
@@ -14,7 +14,6 @@
    owner: "{{ item.owner | default(authelia_user) }}"
    group: "{{ item.group | default(authelia_user) }}"
    mode: "{{ item.mode | default('0750') }}"
  when: item.path | default(false, true) | bool
  loop:
    - path: "{{ authelia_base_dir }}"
      mode: "0755"
@@ -22,8 +21,6 @@
      mode: "0750"
    - path: "{{ authelia_data_dir }}"
      mode: "0750"
    - path: "{{ authelia_asset_dir }}"
      mode: "0750"

- name: Ensure config file is generated
  copy:
@@ -5,7 +5,6 @@ authelia_run_group: "{{ (authelia_user_info.group) if authelia_user_info is defi

authelia_container_base_volumes: >-2
  {{ [ authelia_config_file + ":/config/configuration.yml:ro"]
  + ([authelia_asset_dir + '/:' + authelia_config_server_asset_path + ':ro'] if authelia_asset_dir | default(false, true) else [])
  + ([ authelia_sqlite_storage_file + ":" + authelia_config_storage_local_path + ":z" ]
    if authelia_config_storage_local_path | default(false, true) else [])
  + ([ authelia_notification_storage_file + ":" + authelia_config_notifier_filesystem_filename + ":z" ]
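The variable above composes the container volume list by concatenating optional entries. A small vars sketch of the same Jinja pattern, using hypothetical `demo_*` names, shows how an entry is dropped when its source variable is empty.

```yaml
# Vars sketch (hypothetical demo_* names): optional entries are appended only
# when their source variable is non-empty.
demo_config_file: /opt/authelia/config/config.yaml
demo_asset_dir: ""   # empty, so the asset mount entry is omitted below
demo_volumes: >-2
  {{ [ demo_config_file + ":/config/configuration.yml:ro" ]
  + ([ demo_asset_dir + ":/config/assets/:ro" ]
    if demo_asset_dir | default(false, true) else []) }}
```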
@@ -22,7 +21,6 @@ authelia_top_level_config:
  theme: "{{ authelia_config_theme }}"
  jwt_secret: "{{ authelia_config_jwt_secret }}"
  log: "{{ authelia_config_log }}"
  telemetry: "{{ authelia_config_telemetry }}"
  totp: "{{ authelia_config_totp }}"
  webauthn: "{{ authelia_config_webauthn }}"
  duo_api: "{{ authelia_config_duo_api }}"
@@ -51,7 +49,6 @@ authelia_config_server: >-2
    "host": authelia_config_server_host,
    "port": authelia_config_server_port,
    "path": authelia_config_server_path,
    "asset_path": authelia_config_server_asset_path,
    "read_buffer_size": authelia_config_server_read_buffer_size,
    "write_buffer_size": authelia_config_server_write_buffer_size,
    "enable_pprof": authelia_config_server_enable_pprof,
@@ -75,10 +72,6 @@ authelia_config_log: >-2
  | combine({"keep_stdout": authelia_config_log_keep_stdout}
    if authelia_config_log_file_path | default(false, true) else {})
  }}
authelia_config_telemetry:
  metrics:
    enabled: "{{ authelia_config_telemetry_metrics_enabled }}"
    address: "{{ authelia_config_telemetry_metrics_address }}"
authelia_config_totp:
  disable: "{{ authelia_config_totp_disable }}"
  issuer: "{{ authelia_config_totp_issuer }}"
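The `combine` usage above only merges the `keep_stdout` key into the log config when a log file path is actually set. The following vars sketch, with hypothetical `demo_*` names, illustrates that conditional-merge idiom in isolation.

```yaml
# Vars sketch (hypothetical demo_* names): keep_stdout is only merged in when a
# log file path is configured; with an empty path the combine argument is {}.
demo_log_file_path: /var/log/demo.log
demo_log: >-2
  {{ { "level": "info", "format": "json" }
  | combine({ "keep_stdout": true }
    if demo_log_file_path | default(false, true) else {}) }}
```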
@@ -108,6 +101,7 @@ authelia_config_ntp:
authelia_config_authentication_backend: >-2
  {{
    {
      "disable_reset_password": authelia_config_authentication_backend_disable_reset_password,
      "refresh_interval": authelia_config_authentication_backend_refresh_interval,
    }
    | combine({"password_reset": authelia_config_authentication_backend_password_reset}
@@ -118,7 +112,6 @@ authelia_config_authentication_backend: >-2
  }}
authelia_config_authentication_backend_password_reset:
  custom_url: "{{ authelia_config_authentication_backend_password_reset_custom_url }}"
  disable: "{{ authelia_config_authentication_backend_password_reset_disable }}"
authelia_config_authentication_backend_ldap:
  implementation: "{{ authelia_config_authentication_backend_ldap_implementation }}"
  url: "{{ authelia_config_authentication_backend_ldap_url }}"
@@ -1,22 +0,0 @@
# `finallycoffee.services.elasticsearch`

A simple ansible role which deploys a single-node Elasticsearch container to provide
an easy way to do some indexing.

## Usage

By default, `/opt/elasticsearch/data` is used to persist data; it can be
customized using either `elasticsearch_base_path` or `elasticsearch_data_path`.

As Elasticsearch can be quite memory-heavy, the maximum amount of allowed RAM
can be configured using `elasticsearch_allocated_ram_mb`, defaulting to 512 (MB).

The cluster name and discovery type can be overridden using
`elasticsearch_config_cluster_name` (default: elastic) and
`elasticsearch_config_discovery_type` (default: single-node), should one
need a multi-node elasticsearch deployment.

By default, no ports or networks are mapped; explicit mapping using
either ports (`elasticsearch_container_ports`) or networks
(`elasticsearch_container_networks`) is required in order for other services
to reach Elasticsearch.
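Taken together with the defaults below, the usage described in this README might translate into a playbook like the following sketch; the host group and the port mapping value are illustrative assumptions, while the variable names come from the role itself.

```yaml
---
# Illustrative sketch only: apply the elasticsearch role documented above with
# an explicit port mapping and a larger heap.
- name: Deploy a single-node elasticsearch instance
  hosts: search_hosts            # hypothetical inventory group
  become: true
  roles:
    - role: finallycoffee.services.elasticsearch
      vars:
        elasticsearch_allocated_ram_mb: 1024
        elasticsearch_container_ports:
          - "127.0.0.1:9200:9200"
```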
@@ -1,35 +0,0 @@
---

elasticsearch_version: 7.10.2

elasticsearch_base_path: /opt/elasticsearch
elasticsearch_data_path: "{{ elasticsearch_base_path }}/data"

elasticsearch_config_cluster_name: elastic
elasticsearch_config_discovery_type: single-node
elasticsearch_config_boostrap_memory_lock: true
elasticsearch_allocated_ram_mb: 512

elasticsearch_container_image_name: docker.elastic.co/elasticsearch/elasticsearch-oss
elasticsearch_container_image_tag: ~
elasticsearch_container_image: >-
  {{ elasticsearch_container_image_name }}:{{ elasticsearch_container_image_tag | default(elasticsearch_version, true) }}

elasticsearch_container_name: elasticsearch
elasticsearch_container_env:
  "ES_JAVA_OPTS": "-Xms{{ elasticsearch_allocated_ram_mb }}m -Xmx{{ elasticsearch_allocated_ram_mb }}m"
  "cluster.name": "{{ elasticsearch_config_cluster_name }}"
  "discovery.type": "{{ elasticsearch_config_discovery_type }}"
  "bootstrap.memory_lock": "{{ 'true' if elasticsearch_config_boostrap_memory_lock else 'false' }}"
elasticsearch_container_user: ~
elasticsearch_container_ports: ~
elasticsearch_container_labels:
  version: "{{ elasticsearch_version }}"
elasticsearch_container_ulimits:
  # - "memlock:{{ (1.5 * 1024 * elasticsearch_allocated_ram_mb) | int }}:{{ (1.5 * 1024 * elasticsearch_allocated_ram_mb) | int }}"
  - "memlock:-1:-1"
elasticsearch_container_volumes:
  - "{{ elasticsearch_data_path }}:/usr/share/elasticsearch/data:z"
elasticsearch_container_networks: ~
elasticsearch_container_purge_networks: ~
elasticsearch_container_restart_policy: unless-stopped
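For clarity, with the defaults above the container environment renders to fixed JVM heap flags; the block below is simply the result of substituting the defaults shown (allocated RAM of 512 MB) into `elasticsearch_container_env`.

```yaml
# Resolved container environment for the defaults above
# (elasticsearch_allocated_ram_mb: 512, cluster "elastic", single-node discovery).
elasticsearch_container_env:
  "ES_JAVA_OPTS": "-Xms512m -Xmx512m"
  "cluster.name": "elastic"
  "discovery.type": "single-node"
  "bootstrap.memory_lock": "true"
```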
@@ -1,32 +0,0 @@
---

- name: Ensure host directories are present
  file:
    path: "{{ item }}"
    state: directory
    mode: "0777"
  loop:
    - "{{ elasticsearch_base_path }}"
    - "{{ elasticsearch_data_path }}"

- name: Ensure elastic container image is present
  docker_image:
    name: "{{ elasticsearch_container_image }}"
    state: present
    source: pull
    force_source: "{{ elasticsearch_container_image_tag|default(false, true)|bool }}"

- name: Ensure elastic container is running
  docker_container:
    name: "{{ elasticsearch_container_name }}"
    image: "{{ elasticsearch_container_image }}"
    env: "{{ elasticsearch_container_env | default(omit, True) }}"
    user: "{{ elasticsearch_container_user | default(omit, True) }}"
    ports: "{{ elasticsearch_container_ports | default(omit, True) }}"
    labels: "{{ elasticsearch_container_labels | default(omit, True) }}"
    volumes: "{{ elasticsearch_container_volumes }}"
    ulimits: "{{ elasticsearch_container_ulimits }}"
    networks: "{{ elasticsearch_container_networks | default(omit, True) }}"
    purge_networks: "{{ elasticsearch_container_purge_networks | default(omit, True) }}"
    restart_policy: "{{ elasticsearch_container_restart_policy }}"
    state: started
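The tasks above lean on Ansible's special `omit` value so that optional container settings fall back to the module defaults when the corresponding variable is `~`. A minimal, self-contained illustration of that idiom (the container name, image, and `demo_ports` variable are hypothetical):

```yaml
# Minimal illustration of the `default(omit, true)` idiom used above: when
# demo_ports is empty or undefined, the `ports` argument is omitted entirely
# and docker_container behaves as if it had never been passed.
- name: Run a container with an optional port mapping (illustrative)
  docker_container:
    name: demo
    image: docker.io/library/alpine:3.16
    ports: "{{ demo_ports | default(omit, true) }}"
```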
@@ -1,6 +1,6 @@
---

gitea_version: "1.17.0"
gitea_version: "1.16.4"
gitea_user: git
gitea_base_path: "/opt/gitea"
gitea_data_path: "{{ gitea_base_path }}/data"
@@ -1,7 +1,6 @@
---

jellyfin_user: jellyfin
jellyfin_version: 10.8.1

jellyfin_base_path: /opt/jellyfin
jellyfin_config_path: "{{ jellyfin_base_path }}/config"
@@ -11,13 +10,11 @@ jellyfin_media_volumes: []

jellyfin_container_name: jellyfin
jellyfin_container_image_name: "docker.io/jellyfin/jellyfin"
jellyfin_container_image_tag: ~
jellyfin_container_image_ref: "{{ jellyfin_container_image_name }}:{{ jellyfin_container_image_tag | default(jellyfin_version, true) }}"
jellyfin_container_image_tag: "latest"
jellyfin_container_image_ref: "{{ jellyfin_container_image_name }}:{{ jellyfin_container_image_tag }}"
jellyfin_container_network_mode: host
jellyfin_container_networks: ~
jellyfin_container_volumes: "{{ jellyfin_container_base_volumes + jellyfin_media_volumes }}"
jellyfin_container_labels: "{{ jellyfin_container_base_labels | combine(jellyfin_container_extra_labels) }}"
jellyfin_container_extra_labels: {}
jellyfin_container_restart_policy: "unless-stopped"

jellyfin_host_directories:
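The two `jellyfin_container_image_ref` definitions above resolve differently: with the tag left at `~`, the reference falls back to `jellyfin_version`, while a literal tag such as `latest` is used verbatim. A small sketch of the resulting values, using only the numbers visible in this diff (the `_when_*` helper names are hypothetical):

```yaml
# Resolution of jellyfin_container_image_ref under the two definitions above
# (hypothetical helper names, shown only to illustrate the substitution).
jellyfin_image_ref_when_tag_is_null: "docker.io/jellyfin/jellyfin:10.8.1"    # tag ~ falls back to jellyfin_version
jellyfin_image_ref_when_tag_is_latest: "docker.io/jellyfin/jellyfin:latest"  # literal tag used as-is
```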
@@ -21,18 +21,13 @@
    name: "{{ jellyfin_container_image_ref }}"
    state: present
    source: pull
    force_source: "{{ jellyfin_container_image_tag | default(false, true) }}"
  register: jellyfin_container_image_pull_result
  until: jellyfin_container_image_pull_result is succeeded
  retries: 5
  delay: 3
    force_source: "{{ jellyfin_container_image_tag in ['stable', 'unstable'] }}"

- name: Ensure container '{{ jellyfin_container_name }}' is running
  docker_container:
    name: "{{ jellyfin_container_name }}"
    image: "{{ jellyfin_container_image_ref }}"
    user: "{{ jellyfin_uid }}:{{ jellyfin_gid }}"
    labels: "{{ jellyfin_container_labels }}"
    volumes: "{{ jellyfin_container_volumes }}"
    networks: "{{ jellyfin_container_networks | default(omit, True) }}"
    network_mode: "{{ jellyfin_container_network_mode }}"
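The `force_source` expression that checks for `stable`/`unstable` only forces a re-pull for those floating tags, so a fixed tag such as `latest` or a version number is not re-downloaded on every run. A quick, illustrative way to see what the expression evaluates to for the configured tag:

```yaml
# Illustrative check of the force_source expression above: only the floating
# 'stable' / 'unstable' tags trigger a forced re-pull.
- name: Show whether the current tag would force a re-pull
  debug:
    msg: "force_source={{ jellyfin_container_image_tag in ['stable', 'unstable'] }}"
```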
@@ -3,6 +3,3 @@
jellyfin_container_base_volumes:
  - "{{ jellyfin_config_path }}:/config:z"
  - "{{ jellyfin_cache_path }}:/cache:z"

jellyfin_container_base_labels:
  version: "{{ jellyfin_version }}"
@@ -44,14 +44,22 @@

- name: Ensure systemd service file for '{{ restic_job_name }}' is templated
  template:
    dest: "/etc/systemd/system/{{ restic_systemd_unit_naming_scheme }}.service"
    src: restic.service.j2
    dest: "/etc/systemd/system/{{ service.unit_name }}.service"
    src: "{{ service.file }}"
    owner: root
    group: root
    mode: 0640
  notify:
    - reload-systemd
    - trigger-restic
  loop:
    - unit_name: "{{ restic_systemd_unit_naming_scheme }}"
      file: restic.service.j2
    - unit_name: "{{ restic_systemd_unit_naming_scheme }}-unlock"
      file: restic-unlock.service.j2
  loop_control:
    loop_var: service
    label: "{{ service.file }}"

- name: Ensure systemd service file for '{{ restic_job_name }}' is templated
  template:
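With the loop above, the single template task now renders two units derived from `restic_systemd_unit_naming_scheme`. Assuming a purely hypothetical scheme value of `restic-example`, the destinations would be:

```yaml
# Files rendered by the loop above, assuming
# restic_systemd_unit_naming_scheme == "restic-example" (hypothetical value):
#   /etc/systemd/system/restic-example.service          <- restic.service.j2
#   /etc/systemd/system/restic-example-unlock.service   <- restic-unlock.service.j2
```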
@@ -66,6 +74,11 @@
- name: Flush handlers to ensure systemd knows about '{{ restic_job_name }}'
  meta: flush_handlers

- name: Ensure systemd service for unlocking repository for '{{ restic_job_name }}' is enabled
  systemd:
    name: "{{ restic_systemd_unit_naming_scheme }}-unlock.service"
    enabled: true

- name: Ensure systemd timer for '{{ restic_job_name }}' is activated
  systemd:
    name: "{{ restic_systemd_unit_naming_scheme }}.timer"
roles/restic/templates/restic-unlock.service.j2 (new file, 21 lines added)
@@ -0,0 +1,21 @@
[Unit]
Description={{ restic_job_description }} - Unlock after reboot job

[Service]
Type=oneshot
User={{ restic_user }}
WorkingDirectory={{ restic_systemd_working_directory }}
SyslogIdentifier={{ restic_systemd_syslog_identifier }}

Environment=RESTIC_REPOSITORY={{ restic_repo_url }}
Environment=RESTIC_PASSWORD={{ restic_repo_password }}
{% if restic_s3_key_id and restic_s3_access_key %}
Environment=AWS_ACCESS_KEY_ID={{ restic_s3_key_id }}
Environment=AWS_SECRET_ACCESS_KEY={{ restic_s3_access_key }}
{% endif %}

ExecStartPre=-/bin/sh -c '/usr/bin/restic snapshots || /usr/bin/restic init'
ExecStart=/usr/bin/restic unlock

[Install]
WantedBy=multi-user.target