diff --git a/roles/pgweb/tasks/main.yml b/roles/pgweb/tasks/main.yml index 08a899b..7ead00d 100644 --- a/roles/pgweb/tasks/main.yml +++ b/roles/pgweb/tasks/main.yml @@ -1,17 +1,28 @@ --- -- include: user.yml -- include: directories.yml -- include: facts.yml -- include: archive_pre.yml - when: pgweb_install_mode == 'upgrade' -- include: install.yml -- include: conf.yml -- include: iptables.yml +- include_tasks: user.yml + tags: always +- include_tasks: directories.yml + tags: always +- include_tasks: facts.yml + tags: always +- include_tasks: archive_pre.yml + when: pgweb_install_mode | default('none') == 'upgrade' + tags: always +- include_tasks: install.yml + tags: always +- include_tasks: conf.yml + tags: always +- include_tasks: iptables.yml when: iptables_manage | default(True) -- include: services.yml -- include: archive_post.yml - when: pgweb_install_mode == 'upgrade' -- include: write_version.yml -- include: cleanup.yml + tags: always +- include_tasks: services.yml + tags: always +- include_tasks: archive_post.yml + when: pgweb_install_mode | default('none') == 'upgrade' + tags: always +- include_tasks: write_version.yml + tags: always +- include_tasks: cleanup.yml + tags: always diff --git a/roles/system_proxy/tasks/main.yml b/roles/system_proxy/tasks/main.yml index 718860d..555a353 100644 --- a/roles/system_proxy/tasks/main.yml +++ b/roles/system_proxy/tasks/main.yml @@ -40,5 +40,6 @@ notify: reload systemd tags: proxy -- include: "{{ ansible_os_family }}.yml" +- include_tasks: "{{ ansible_os_family }}.yml" + tags: always diff --git a/roles/taiga/tasks/main.yml b/roles/taiga/tasks/main.yml index 08890e4..856c7c0 100644 --- a/roles/taiga/tasks/main.yml +++ b/roles/taiga/tasks/main.yml @@ -1,14 +1,24 @@ --- -- include: user.yml -- include: directories.yml -- include: facts.yml -- include: archive_pre.yml - when: taiga_install_mode == 'upgrade' -- include: install.yml -- include: conf.yml -- include: services.yml -- include: write_version.yml -- include: 
archive_post.yml - when: taiga_install_mode == 'upgrade' -- include: cleanup.yml +- include_tasks: user.yml + tags: always +- include_tasks: directories.yml + tags: always +- include_tasks: facts.yml + tags: always +- include_tasks: archive_pre.yml + when: taiga_install_mode | default('none') == 'upgrade' + tags: always +- include_tasks: install.yml + tags: always +- include_tasks: conf.yml + tags: always +- include_tasks: services.yml + tags: always +- include_tasks: write_version.yml + tags: always +- include_tasks: archive_post.yml + when: taiga_install_mode | default('none') == 'upgrade' + tags: always +- include_tasks: cleanup.yml + tags: always diff --git a/roles/tomcat/tasks/main.yml b/roles/tomcat/tasks/main.yml index ddfcf45..8df07f0 100644 --- a/roles/tomcat/tasks/main.yml +++ b/roles/tomcat/tasks/main.yml @@ -1,7 +1,11 @@ --- -- include: install.yml -- include: conf.yml -- include: iptables.yml +- include_tasks: install.yml + tags: always +- include_tasks: conf.yml + tags: always +- include_tasks: iptables.yml when: iptables_manage | default(True) -- include: services.yml + tags: always +- include_tasks: services.yml + tags: always diff --git a/roles/turnserver/defaults/main.yml b/roles/unmaintained/turnserver/defaults/main.yml similarity index 100% rename from roles/turnserver/defaults/main.yml rename to roles/unmaintained/turnserver/defaults/main.yml diff --git a/roles/turnserver/files/dehydrated_deploy_hook b/roles/unmaintained/turnserver/files/dehydrated_deploy_hook similarity index 100% rename from roles/turnserver/files/dehydrated_deploy_hook rename to roles/unmaintained/turnserver/files/dehydrated_deploy_hook diff --git a/roles/turnserver/files/turnserver.service b/roles/unmaintained/turnserver/files/turnserver.service similarity index 100% rename from roles/turnserver/files/turnserver.service rename to roles/unmaintained/turnserver/files/turnserver.service diff --git a/roles/turnserver/handlers/main.yml 
b/roles/unmaintained/turnserver/handlers/main.yml similarity index 100% rename from roles/turnserver/handlers/main.yml rename to roles/unmaintained/turnserver/handlers/main.yml diff --git a/roles/turnserver/tasks/main.yml b/roles/unmaintained/turnserver/tasks/main.yml similarity index 100% rename from roles/turnserver/tasks/main.yml rename to roles/unmaintained/turnserver/tasks/main.yml diff --git a/roles/turnserver/templates/turnserver.conf.j2 b/roles/unmaintained/turnserver/templates/turnserver.conf.j2 similarity index 100% rename from roles/turnserver/templates/turnserver.conf.j2 rename to roles/unmaintained/turnserver/templates/turnserver.conf.j2 diff --git a/roles/unmaintained/wh_backend/handlers/main.yml b/roles/unmaintained/wh_backend/handlers/main.yml new file mode 100644 index 0000000..2c24c96 --- /dev/null +++ b/roles/unmaintained/wh_backend/handlers/main.yml @@ -0,0 +1,9 @@ +--- + +- name: reset permissions + command: sh /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/bin/perms.sh + loop: "{{ wh_clients | subelements('apps') }}" + when: item.1.backend | default(item.0.backend) | default(wh_defaults.backend) == inventory_hostname + +- name: restart wh-acld + service: name=wh-acld state=restarted diff --git a/roles/unmaintained/wh_backend/meta/main.yml b/roles/unmaintained/wh_backend/meta/main.yml new file mode 100644 index 0000000..727489b --- /dev/null +++ b/roles/unmaintained/wh_backend/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - role: wh_common + - role: httpd_php diff --git a/roles/unmaintained/wh_backend/tasks/main.yml b/roles/unmaintained/wh_backend/tasks/main.yml new file mode 100644 index 0000000..0124231 --- /dev/null +++ b/roles/unmaintained/wh_backend/tasks/main.yml @@ -0,0 +1,207 @@ +--- + +- include_vars: "{{ item }}" + with_first_found: + - vars/{{ ansible_distribution }}-{{ ansible_distribution_major_version }}.yml + - vars/{{ ansible_os_family }}-{{ ansible_distribution_major_version }}.yml + - vars/{{ ansible_distribution 
}}.yml + - vars/{{ ansible_os_family }}.yml + tags: web + +- name: Install needed tools + yum: name={{ wh_backend_packages }} + tags: web + +- set_fact: wh_app_dir=[] + tags: web +- name: Build a list of app root + set_fact: + wh_app_dir: "{{ wh_app_dir }} + [ '/opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}' ]" + loop: "{{ wh_clients | subelements('apps') }}" + when: item.1.backend | default(item.0.backend) | default(wh_defaults.backend) == inventory_hostname + tags: web + +- name: Create unix accounts + user: + name: "wh-{{ item.name }}" + comment: "Unix account for {{ item.name }}" + system: True + shell: "{{ shell | default('/sbin/nologin') }}" + home: /opt/wh/{{ item.name }} + loop: "{{ wh_clients }}" + tags: web + +- name: Create ssh directories + file: path=/etc/ssh/wh/{{ item.name }}/ state=directory mode=755 + loop: "{{ wh_clients }}" + tags: web + +- name: Deploy SSH keys + authorized_key: + user: root + key: "{{ item.ssh_keys | default([]) | join(\"\n\") }}" + path: /etc/ssh/wh/{{ item.name }}/authorized_keys + manage_dir: False + exclusive: True + loop: "{{ wh_clients }}" + tags: web + +- name: Set correct permissions on authorized_key files + file: path=/etc/ssh/wh/{{ item.name }}/authorized_keys owner=root group=root mode=644 + loop: "{{ wh_clients }}" + when: item.ssh_keys | default([]) | length > 0 + tags: web + +- name: List all authorized keys directories + shell: ls -1 /etc/ssh/wh | xargs -n1 basename + register: wh_existing_ssh_keys + changed_when: False + tags: web + +- name: Remove unmanaged ssh keys + file: path=/etc/ssh/wh/{{ item }} state=absent + with_items: "{{ wh_existing_ssh_keys.stdout_lines | default([]) }}" + when: item not in wh_clients | map(attribute='name') + tags: web + +- name: Create applications directories + file: path={{ item.0 }}/{{ item.1 }} state=directory + loop: "{{ wh_app_dir | product(['web','data','tmp','logs','archives','bin','info', 'db_dumps']) | list }}" + notify: reset permissions + tags: web + +- name: Set
correct SELinux context for apps directories + sefcontext: + target: "{{ item }}(/.*)?" + setype: httpd_sys_content_t + state: present + when: ansible_selinux.status == 'enabled' + loop: "{{ wh_app_dir }}" + notify: reset permissions + tags: web + +- name: Deploy PHP FPM pools + template: src=php-fpm.conf.j2 dest=/etc/opt/remi/php{{ item }}/php-fpm.d/wh.conf + vars: + wh_php_version: "{{ item }}" + loop: "{{ httpd_php_versions }}" + notify: restart php-fpm + tags: web + +- name: Deploy httpd configuration + template: src=httpd.conf.j2 dest=/etc/httpd/ansible_conf.d/31-wh.conf + notify: reload httpd + tags: web + +- name: Deploy permissions scripts + template: src=perms.sh.j2 dest=/opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/bin/perms.sh + loop: "{{ wh_clients | subelements('apps') }}" + when: item.1.backend | default(item.0.backend) | default(wh_defaults.backend) == inventory_hostname + notify: reset permissions + tags: web + +- name: Create databases + mysql_db: + name: "{{ item.0.name[0:7] }}_{{ item.1.name[0:7] }}" + login_host: "{{ (wh_default_app | combine(item.1)).database.server | default(mysql_server) }}" + login_user: sqladmin + login_password: "{{ mysql_admin_pass }}" + collation: "{{ (wh_default_app | combine(item.1)).database.collation }}" + encoding: "{{ (wh_default_app | combine(item.1)).database.encoding }}" + state: present + loop: "{{ wh_clients | subelements('apps') }}" + when: + - (wh_default_app | combine(item.1)).database.enabled + - (wh_default_app | combine(item.1)).database.engine == 'mysql' + - item.1.backend | default(item.0.backend) | default(wh_defaults.backend) == inventory_hostname + tags: web + +- name: Create applications database users + mysql_user: + name: "{{ item.0.name[0:7] }}_{{ item.1.name[0:7] }}" + password: "{{ (wh_default_app | combine(item.1)).database.pass | default((wh_pass_seed | password_hash('sha256', 65534 | random(seed=item.0.name + item.1.name) | string))[9:27] ) }}" + priv: "{{ item.0.name[0:7] }}_{{ 
item.1.name[0:7] }}.*:ALL" + host: "%" + login_host: "{{ (wh_default_app | combine(item.1)).database.server | default(mysql_server) }}" + login_user: sqladmin + login_password: "{{ mysql_admin_pass }}" + state: present + loop: "{{ wh_clients | subelements('apps') }}" + when: + - (wh_default_app | combine(item.1)).database.enabled + - (wh_default_app | combine(item.1)).database.engine == 'mysql' + - item.1.backend | default(item.0.backend) | default(wh_defaults.backend) == inventory_hostname + tags: web + +- name: Create clients database user + mysql_user: + name: "{{ item.0.name[0:15] }}" + password: "{{ item.0.db_pass | default((wh_pass_seed | password_hash('sha256', 65534 | random(seed=item.0.name) | string))[9:27]) }}" + priv: "{{ item.0.name[0:7] }}_{{ item.1.name[0:7] }}.*:ALL" + host: "%" + login_host: "{{ (wh_default_app | combine(item.1)).database.server | default(mysql_server) }}" + login_user: sqladmin + login_password: "{{ mysql_admin_pass }}" + append_privs: True + state: present + loop: "{{ wh_clients | subelements('apps')}}" + when: + - (wh_default_app | combine(item.1)).database.enabled + - (wh_default_app | combine(item.1)).database.engine == 'mysql' + - item.1.backend | default(item.0.backend) | default(wh_defaults.backend) == inventory_hostname + tags: web + +- name: Deploy databases info file + template: src=database.txt.j2 dest=/opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/info/database.txt + loop: "{{ wh_clients | subelements('apps') }}" + when: item.1.backend | default(item.0.backend) | default(wh_defaults.backend) == inventory_hostname + notify: reset permissions + tags: web + +- name: Deploy per app backup scripts + template: src=backup.sh.j2 dest=/opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/bin/backup.sh mode=750 + loop: "{{ wh_clients | subelements('apps') }}" + when: item.1.backend | default(item.0.backend) | default(wh_defaults.backend) == inventory_hostname + tags: web + +- name: Deploy wh_create_archives script to archive all 
the hosted apps + template: src=wh_create_archives.sh.j2 dest=/usr/local/bin/wh_create_archives.sh mode=750 + tags: web + +- name: Setup a daily cronjob to take automatic archives of webapps + cron: + name: wh_backups + special_time: daily + user: root + job: 'systemd-cat /usr/local/bin/wh_create_archives.sh' + cron_file: wh + state: present + tags: web + +- name: Deploy global pre/post backup scripts + template: src={{ item }}_backup.sh.j2 dest=/etc/backup/{{ item }}.d/wh.sh mode=700 + loop: [ 'pre', 'post' ] + tags: web + +- name: Deploy logrotate snippet + template: src=logrotate.j2 dest=/etc/logrotate.d/wh + tags: web + +- name: Deploy wh-acld + template: src=wh-acld.j2 dest=/usr/local/bin/wh-acld mode=750 + notify: restart wh-acld + tags: web + +- name: Deploy wh-acld service unit + template: src=wh-acld.service.j2 dest=/etc/systemd/system/wh-acld.service + register: wh_acld_unit + tags: web + +- name: Reload systemd + systemd: daemon_reload=True + when: wh_acld_unit.changed + tags: web + +- name: Start and enable wh-acld + service: name=wh-acld state=started enabled=True + tags: web diff --git a/roles/unmaintained/wh_backend/templates/backup.sh.j2 b/roles/unmaintained/wh_backend/templates/backup.sh.j2 new file mode 100644 index 0000000..71160e2 --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/backup.sh.j2 @@ -0,0 +1,14 @@ +#!/bin/bash -e + +cd /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }} +# Remove old archives +find archives/ -type f -mtime +2 -exec rm -f "{}" \; +# Create the new daily archive, with a dump of the DB and the web, data and logs dir +TS=$(date +%Y-%m-%d_%Hh%M) +mysqldump --add-drop-table --single-transaction \ + --host={{ (wh_default_app | combine(item.1)).database.server | default(mysql_server) }} \ + --user={{ item.0.name[0:7] }}_{{ item.1.name[0:7] }} \ + --password="{{ (wh_default_app | combine(item.1)).database.pass | default((wh_pass_seed | password_hash('sha256', 65534 | random(seed=item.0.name + item.1.name) | 
string))[9:27] ) }}" \ + {{ item.0.name[0:7] }}_{{ item.1.name[0:7] }} | \ + zstd -c > archives/$TS.sql.zst +ZSTD_CLEVEL=15 ZSTD_NBTHREADS=0 nice -n 9 tar cf archives/$TS.tar.zst --use-compress-program=zstd data web logs diff --git a/roles/unmaintained/wh_backend/templates/database.txt.j2 b/roles/unmaintained/wh_backend/templates/database.txt.j2 new file mode 100644 index 0000000..0a322e5 --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/database.txt.j2 @@ -0,0 +1,5 @@ +Type: {{ (wh_default_app | combine(item.1)).database.engine | default('mysql') }} +Server: {{ (wh_default_app | combine(item.1)).database.server | default(mysql_server) }} +Database: {{ item.0.name[0:7] }}_{{ item.1.name[0:7] }} +User: {{ item.0.name[0:7] }}_{{ item.1.name[0:7] }} +Password: {{ (wh_default_app | combine(item.1)).database.pass | default((wh_pass_seed | password_hash('sha256', 65534 | random(seed=item.0.name + item.1.name) | string))[9:27] ) }} diff --git a/roles/unmaintained/wh_backend/templates/httpd.conf.j2 b/roles/unmaintained/wh_backend/templates/httpd.conf.j2 new file mode 100644 index 0000000..f8ecd04 --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/httpd.conf.j2 @@ -0,0 +1,44 @@ +# {{ ansible_managed }} + +{% for client in wh_clients %} +{% for app in client.apps | default([]) %} +{% set app = wh_default_app | combine(app, recursive=True) %} +{% if app.backend | default(client.backend) | default(wh_defaults.backend) == inventory_hostname %} +################################################ +## vhost for {{ client.name }}-{{ app.name }} +################################################ + + + ServerName {{ app.vhost | default(client.name + '-' + app.name + '.wh.fws.fr') }} +{% if app.aliases | length > 0 %} + ServerAlias {{ app.aliases | join(' ') }} +{% endif %} + ServerAdmin webmaster@fws.fr + DocumentRoot /opt/wh/{{ client.name }}/apps/{{ app.name }}/web + Alias /_deferror/ "/usr/share/httpd/error/" + Include ansible_conf.d/common_env.inc + ProxyTimeout 
{{ app.php.max_execution_time }} + + +################################################ +## webroot for {{ client.name }}-{{ app.name }} +################################################ + + + AllowOverride All + Options FollowSymLinks + Require all granted +{% if app.php.enabled %} + + SetHandler "proxy:unix:/run/php-fpm/php{{ app.php.version}}-{{ client.name }}-{{ app.name }}.sock|fcgi://localhost" + +{% endif %} + + + Require all denied + + + +{% endif %} +{% endfor %} +{% endfor %} diff --git a/roles/unmaintained/wh_backend/templates/logrotate.j2 b/roles/unmaintained/wh_backend/templates/logrotate.j2 new file mode 100644 index 0000000..c5a634a --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/logrotate.j2 @@ -0,0 +1,21 @@ +# {{ ansible_managed }} +{% for client in wh_clients %} +{% for app in client.apps %} +{% set app = wh_default_app | combine(app, recursive=True) %} +{% if app.backend | default(client.backend) | default(wh_defaults.backend) == inventory_hostname %} +/opt/wh/{{ client.name }}/apps/{{ app.name }}/logs/*.log { + rotate 52 + weekly + copytruncate + missingok + compress + compressoptions -T0 + compresscmd /bin/xz + uncompresscmd /bin/unxz + compressext .xz + su {{ app.run_as | default('wh-' + client.name) }} {{ app.run_as | default('wh-' + client.name) }} +} + +{% endif %} +{% endfor %} +{% endfor %} diff --git a/roles/unmaintained/wh_backend/templates/perms.sh.j2 b/roles/unmaintained/wh_backend/templates/perms.sh.j2 new file mode 100644 index 0000000..8a12bc3 --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/perms.sh.j2 @@ -0,0 +1,35 @@ +#!/bin/sh + +# Set correct SELinux label +restorecon -R /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }} + +# Remove all the ACL so we can start from scratch +setfacl -R --remove-all --remove-default /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }} + +# Set permissions on the top level client dir. Not recursively ! 
+# Here, the corresponding client only has read permissions (pus the tech team) +chown root:root /opt/wh/{{ item.0.name }}/{,apps} +chmod 750 /opt/wh/{{ item.0.name }}/ +chmod 755 /opt/wh/{{ item.0.name }}/apps +setfacl -m u:apache:rX,g:Tech:rX,g:Client_{{ item.0.name }}:rX,u:{{ item.1.run_as | default('wh-' + item.0.name) }}:rX /opt/wh/{{ item.0.name }}/ + +# Set decent permissions, aka rw for files and rwx for directories. With setgid so the group owner is inherited to new files +find /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/{data,tmp,web} -type f -exec chmod 660 "{}" \; +find /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/{data,tmp,web} -type d -exec chmod 2770 "{}" \; + +# Now, grant apache read access (needed for serving static assets), and full rw access to the client group. Set mask to full permission, we don't want to limit ACL. And excplicitely set other perms to 0 +# Members of the tech team has write access for install/debug +setfacl -R -m u:apache:rX,d:u:apache:rX,g:Tech:rwX,d:g:Tech:rwX,g:Client_{{ item.0.name }}:rwX,d:g:Client_{{ item.0.name }}:rwX,u:{{ item.1.run_as | default('wh-' + item.0.name) }}:rwX,d:u:{{ item.1.run_as | default('wh-' + item.0.name) }}:rwX,m:rwX,o:- /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }} + +# The bin folder shouldn't be visible to the client, it only contains admin's scripts +setfacl -R --remove-all --remove-default /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/bin +chown -R root:root /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/bin +chmod 700 /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/bin +chmod 750 /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/bin/* + +# Info is readonly for the client (and the tech team) +setfacl -R --remove-all --remove-default /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/info +chown -R root:Client_{{ item.0.name }} /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/info +chmod 750 /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/info +chmod 640 /opt/wh/{{ item.0.name 
}}/apps/{{ item.1.name }}/info/* +setfacl -R -m g:Tech:rX,d:g:Tech:rX,m:rwX,o:- /opt/wh/{{ item.0.name }}/apps/{{ item.1.name }}/info diff --git a/roles/unmaintained/wh_backend/templates/php-fpm.conf.j2 b/roles/unmaintained/wh_backend/templates/php-fpm.conf.j2 new file mode 100644 index 0000000..1e48e41 --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/php-fpm.conf.j2 @@ -0,0 +1,61 @@ +; {{ ansible_managed }} + +{% for client in wh_clients | default([]) %} + +{% for app in client.apps | default([]) %} + +{% set app = wh_default_app | combine(app, recursive=True) %} + +{% if app.php.enabled and app.php.version | string == wh_php_version | string and app.backend | default(client.backend) | default(wh_defaults.backend) == inventory_hostname %} + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Begin pool {{ client.name }}-{{ app.name }} +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +[{{ client.name }}-{{ app.name }}] +listen.owner = root +listen.group = {{ httpd_group }} +listen.mode = 0660 +listen = /run/php-fpm/php{{ wh_php_version }}-{{ client.name }}-{{ app.name }}.sock +user = {{ client.run_as | default('wh-' + client.name) }} +group = {{ client.run_as | default('wh-' + client.name) }} +catch_workers_output = yes + +pm = dynamic +pm.max_children = 15 +pm.start_servers = 3 +pm.min_spare_servers = 3 +pm.max_spare_servers = 6 +pm.max_requests = 5000 +request_terminate_timeout = 5m + +php_flag[display_errors] = {{ app.php.display_error | ternary('on','off') }} +php_admin_flag[log_errors] = on +php_admin_value[error_log] = /opt/wh/{{ client.name }}/apps/{{ app.name }}/logs/php_error.log +php_admin_value[memory_limit] = {{ app.php.memory_limit }} +php_admin_value[session.save_path] = /opt/wh/{{ client.name }}/apps/{{ app.name }}/tmp +php_admin_value[upload_tmp_dir] = /opt/wh/{{ client.name }}/apps/{{ app.name }}/tmp +php_admin_value[sys_temp_dir] = /opt/wh/{{ client.name }}/apps/{{ app.name }}/tmp +php_admin_value[post_max_size] = {{ app.php.upload_max_filesize }} 
+php_admin_value[upload_max_filesize] = {{ app.php.upload_max_filesize }} +php_admin_value[disable_functions] = {{ app.php.disabled_functions | difference(app.php.enabled_functions) | join(', ') }} +php_admin_value[open_basedir] = /opt/wh/{{ client.name }}/apps +php_admin_value[max_execution_time] = {{ app.php.max_execution_time }} +php_admin_value[max_input_time] = {{ app.php.max_execution_time }} +php_admin_flag[allow_url_include] = off +php_admin_flag[allow_url_fopen] = {{ app.php.allow_url_fopen | ternary('on','off') }} +php_admin_flag[file_uploads] = {{ app.php.file_uploads | ternary('on','off') }} +php_admin_flag[session.cookie_httponly] = on +{% if app.php.custom_conf is defined %} +{{ app.php.custom_conf }} +{% endif %} + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; End pool {{ client.name }}-{{ app.name }} +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; + +{% endif %} + +{% endfor %} + +{% endfor %} diff --git a/roles/unmaintained/wh_backend/templates/post_backup.sh.j2 b/roles/unmaintained/wh_backend/templates/post_backup.sh.j2 new file mode 100644 index 0000000..e9c1a8c --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/post_backup.sh.j2 @@ -0,0 +1,3 @@ +#!/bin/bash -e + +rm -f /opt/wh/*/apps/*/db_dumps/*.sql.zst diff --git a/roles/unmaintained/wh_backend/templates/pre_backup.sh.j2 b/roles/unmaintained/wh_backend/templates/pre_backup.sh.j2 new file mode 100644 index 0000000..b58a490 --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/pre_backup.sh.j2 @@ -0,0 +1,17 @@ +#!/bin/sh + +set -eo pipefail + +{% for client in wh_clients %} +{% for app in client.apps | default([]) %} +{% set app = wh_default_app | combine(app, recursive=True) %} +{% if app.backend | default(client.backend) | default(wh_defaults.backend) == inventory_hostname %} +mysqldump --add-drop-table --single-transaction \ + --host={{ (wh_default_app | combine(app)).database.server | default(mysql_server) }} \ + --user={{ client.name[0:7] }}_{{ app.name[0:7] }} \ + --password="{{ (wh_default_app 
| combine(app)).database.pass | default((wh_pass_seed | password_hash('sha256', 65534 | random(seed=client.name + app.name) | string))[9:27] ) }}" \ + {{ client.name[0:7] }}_{{ app.name[0:7] }} | \ + zstd -c > /opt/wh/{{ client.name }}/apps/{{ app.name }}/db_dumps/{{ client.name[0:7] }}_{{ app.name[0:7] }}.sql.zst +{% endif %} +{% endfor %} +{% endfor %} diff --git a/roles/unmaintained/wh_backend/templates/wh-acld.j2 b/roles/unmaintained/wh_backend/templates/wh-acld.j2 new file mode 100644 index 0000000..44c6878 --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/wh-acld.j2 @@ -0,0 +1,17 @@ +#!/bin/bash -e + +while true; do +{% for client in wh_clients %} +{% for app in client.apps %} +{% if app.backend | default(client.backend) | default(wh_defaults.backend) == inventory_hostname %} + if [ -e /opt/wh/{{ client.name }}/apps/{{ app.name }}/tmp/reset -o -e /opt/wh/{{ client.name }}/apps/{{ app.name }}/tmp/reset.txt ]; then + echo Reseting permissions for {{ client.name }} - {{ app.name }} + sh /opt/wh/{{ client.name }}/apps/{{ app.name }}/bin/perms.sh + echo Permissions for {{ client.name }} - {{ app.name }} have been reseted + rm -f /opt/wh/{{ client.name }}/apps/{{ app.name }}/tmp/reset /opt/wh/{{ client.name }}/apps/{{ app.name }}/tmp/reset.txt + fi +{% endif %} +{% endfor %} +{% endfor %} + sleep 5 +done diff --git a/roles/unmaintained/wh_backend/templates/wh-acld.service.j2 b/roles/unmaintained/wh_backend/templates/wh-acld.service.j2 new file mode 100644 index 0000000..7bbe998 --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/wh-acld.service.j2 @@ -0,0 +1,14 @@ +[Unit] +Description=Web Hosting ACL monitor daemon + +[Service] +Type=simple +ExecStart=/usr/local/bin/wh-acld +PrivateTmp=yes +PrivateDevices=yes +MemoryLimit=100M +Restart=on-failure + +[Install] +WantedBy=multi-user.target + diff --git a/roles/unmaintained/wh_backend/templates/wh_create_archives.sh.j2 b/roles/unmaintained/wh_backend/templates/wh_create_archives.sh.j2 new file mode 
100644 index 0000000..0c29d64 --- /dev/null +++ b/roles/unmaintained/wh_backend/templates/wh_create_archives.sh.j2 @@ -0,0 +1,13 @@ +#!/bin/bash -e + +{% for client in wh_clients %} +{% for app in client.apps %} +{% set app = wh_default_app | combine(app, recursive=True) %} +{% if app.backend | default(client.backend) | default(wh_defaults.backend) == inventory_hostname %} +echo Starting archiving {{ client.name }} - {{ app.name }} +sh /opt/wh/{{ client.name }}/apps/{{ app.name }}/bin/backup.sh +echo Archive for {{ client.name }} - {{ app.name }} created +{% endif %} + +{% endfor %} +{% endfor %} diff --git a/roles/unmaintained/wh_backend/vars/RedHat-7.yml b/roles/unmaintained/wh_backend/vars/RedHat-7.yml new file mode 100644 index 0000000..d737ec9 --- /dev/null +++ b/roles/unmaintained/wh_backend/vars/RedHat-7.yml @@ -0,0 +1,6 @@ +--- + +wh_backend_packages: + - acl + - MySQL-python + - mariadb diff --git a/roles/unmaintained/wh_backend/vars/RedHat-8.yml b/roles/unmaintained/wh_backend/vars/RedHat-8.yml new file mode 100644 index 0000000..b789d6f --- /dev/null +++ b/roles/unmaintained/wh_backend/vars/RedHat-8.yml @@ -0,0 +1,7 @@ +--- + +wh_backend_packages: + - acl + - python3-mysql + - mariadb + diff --git a/roles/unmaintained/wh_common/defaults/main.yml b/roles/unmaintained/wh_common/defaults/main.yml new file mode 100644 index 0000000..1ec7df5 --- /dev/null +++ b/roles/unmaintained/wh_common/defaults/main.yml @@ -0,0 +1,3 @@ +--- + +wh_clients: [] diff --git a/roles/unmaintained/wh_common/tasks/facts.yml b/roles/unmaintained/wh_common/tasks/facts.yml new file mode 100644 index 0000000..3e57c33 --- /dev/null +++ b/roles/unmaintained/wh_common/tasks/facts.yml @@ -0,0 +1,15 @@ +--- +- name: Build a list of Zimbra domains + set_fact: wh_mail_domains={{ wh_clients | selectattr('mail', 'defined') | selectattr('mail.enabled', 'equalto', True) | selectattr('mail.domain', 'defined') | map(attribute='mail.domain') | list }} + tags: mail + +- name: Build a list of Zimbra 
domain aliases + set_fact: wh_mail_aliases={{ wh_mail_aliases | default([]) + item.mail.domain_aliases }} + loop: "{{ wh_clients }}" + when: + - item.mail is defined + - item.mail.enabled is defined + - item.mail.enabled + - item.mail.domain_aliases is defined + - item.mail.domain_aliases | length > 0 + tags: mail diff --git a/roles/unmaintained/wh_common/tasks/main.yml b/roles/unmaintained/wh_common/tasks/main.yml new file mode 100644 index 0000000..ad3c4df --- /dev/null +++ b/roles/unmaintained/wh_common/tasks/main.yml @@ -0,0 +1,3 @@ +--- + +- include: facts.yml diff --git a/roles/unmaintained/wh_common/vars/main.yml b/roles/unmaintained/wh_common/vars/main.yml new file mode 100644 index 0000000..e265898 --- /dev/null +++ b/roles/unmaintained/wh_common/vars/main.yml @@ -0,0 +1,40 @@ +--- + +wh_default_client: + ssh_keys: [] +wh_default_app: + allow_ip: [] + deny_ip: [] + aliases: [] + maintenance: False + php: + enabled: True + version: 73 + memory_limit: 256M + display_error: False + file_uploads: True + upload_max_filesize: 10M + disabled_functions: + - system + - show_source + - symlink + - exec + - dl + - shell_exec + - passthru + - phpinfo + - escapeshellarg + - escapeshellcmd + enabled_functions: [] + allow_url_fopen: False + max_execution_time: 900 + # custom_conf: | + # php_admin_value[sendmail_path] = /usr/sbin/sendmail -t -i -f no-reply@domain.tld + cache: enabled + force_https: True + letsencrypt_cert: False + database: + enabled: True + engine: mysql + collation: utf8mb4_unicode_ci + encoding: utf8mb4 diff --git a/roles/unmaintained/wh_pmg/meta/main.yml b/roles/unmaintained/wh_pmg/meta/main.yml new file mode 100644 index 0000000..9ef38c3 --- /dev/null +++ b/roles/unmaintained/wh_pmg/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - role: wh_common + - role: pmg diff --git a/roles/unmaintained/wh_pmg/tasks/facts.yml b/roles/unmaintained/wh_pmg/tasks/facts.yml new file mode 100644 index 0000000..31e9cb8 --- /dev/null +++ 
b/roles/unmaintained/wh_pmg/tasks/facts.yml @@ -0,0 +1,4 @@ +--- + +- set_fact: wh_mail_domains_to_relay={{ wh_mail_domains + wh_mail_aliases | default([]) }} + tags: mail diff --git a/roles/unmaintained/wh_pmg/tasks/main.yml b/roles/unmaintained/wh_pmg/tasks/main.yml new file mode 100644 index 0000000..8fe936b --- /dev/null +++ b/roles/unmaintained/wh_pmg/tasks/main.yml @@ -0,0 +1,22 @@ +--- + +- include: facts.yml + +- name: List configured relay domains + command: pmgsh get /config/domains + register: wh_pmg_domains + changed_when: False + tags: mail +- set_fact: wh_pmg_domains={{ wh_pmg_domains.stdout | from_json | map(attribute='domain') | list }} + tags: mail + +- name: Create domains in PMG relay table + command: pmgsh create /config/domains --domain "{{ item }}" + loop: "{{ wh_mail_domains_to_relay }}" + when: item not in wh_pmg_domains + tags: mail + +- name: Remove domains from PMG relay table + command: pmgsh delete /config/domains/{{ item }} + loop: "{{ wh_pmg_domains }}" + when: item not in wh_mail_domains_to_relay diff --git a/roles/unmaintained/wh_proxy/handlers/main.yml b/roles/unmaintained/wh_proxy/handlers/main.yml new file mode 100644 index 0000000..18b6c3e --- /dev/null +++ b/roles/unmaintained/wh_proxy/handlers/main.yml @@ -0,0 +1,2 @@ +--- +- include: ../nginx/handlers/main.yml diff --git a/roles/unmaintained/wh_proxy/meta/main.yml b/roles/unmaintained/wh_proxy/meta/main.yml new file mode 100644 index 0000000..4fcd8c3 --- /dev/null +++ b/roles/unmaintained/wh_proxy/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - role: wh_common + - role: nginx diff --git a/roles/unmaintained/wh_proxy/tasks/main.yml b/roles/unmaintained/wh_proxy/tasks/main.yml new file mode 100644 index 0000000..615049e --- /dev/null +++ b/roles/unmaintained/wh_proxy/tasks/main.yml @@ -0,0 +1,46 @@ +--- + +- set_fact: role_wh_proxy={{ True }} + tags: web + +- name: Deploy web hosting vhosts + template: src=nginx_vhosts.conf.j2 
dest=/etc/nginx/ansible_conf.d/31-vhosts_wh.conf + notify: reload nginx + tags: web + +- name: Build a list of client vhosts + set_fact: + wh_vhosts: "{{ wh_vhosts | default([]) + [ item.1.vhost | default(item.0.name + '-' + item.1.name + '.wh.fws.fr') ] }}" + loop: "{{ wh_clients | default([]) | subelements('apps') }}" + tags: web + +- name: Check if Let's Encrypt's cert exist (web hosting) + stat: path=/var/lib/dehydrated/certificates/certs/{{ item }}/fullchain.pem + register: wh_letsencrypt_certs + with_items: "{{ wh_vhosts }}" + tags: web + +- name: Create directories for missing Let's Encrypt cert (web hosting) + file: path=/var/lib/dehydrated/certificates/certs/{{ item.item }} state=directory + with_items: "{{ wh_letsencrypt_certs.results }}" + when: + - item.stat is defined + - not item.stat.exists + tags: web + +- name: Link missing Let's Encrypt cert to the default one (web hosting) + file: src={{ nginx_cert_path }} dest=/var/lib/dehydrated/certificates/certs/{{ item.item }}/fullchain.pem state=link + with_items: "{{ wh_letsencrypt_certs.results }}" + when: + - item.stat is defined + - not item.stat.exists + tags: web + +- name: Link missing Let's Encrypt key to the default one (web hosting) + file: src={{ nginx_key_path }} dest=/var/lib/dehydrated/certificates/certs/{{ item.item }}/privkey.pem state=link + with_items: "{{ wh_letsencrypt_certs.results }}" + when: + - item.stat is defined + - not item.stat.exists + tags: web + diff --git a/roles/unmaintained/wh_proxy/templates/nginx_vhosts.conf.j2 b/roles/unmaintained/wh_proxy/templates/nginx_vhosts.conf.j2 new file mode 100644 index 0000000..a6f1ef4 --- /dev/null +++ b/roles/unmaintained/wh_proxy/templates/nginx_vhosts.conf.j2 @@ -0,0 +1,93 @@ +# {{ ansible_managed }} + +{% for client in wh_clients | default([]) %} +{% for app in client.apps | default([]) %} +{% set app = wh_default_app | combine(app, recursive=True) %} + +server { + listen 80; + listen 443 ssl http2; + + ssl_certificate 
/var/lib/dehydrated/certificates/certs/{{ app.vhost | default(client.name + '-' + app.name + '.wh.fws.fr') }}/fullchain.pem; + ssl_certificate_key /var/lib/dehydrated/certificates/certs/{{ app.vhost | default(client.name + '-' + app.name + '.wh.fws.fr') }}/privkey.pem; + + ssl_protocols TLSv1.2 TLSv1.3; + ssl_ciphers ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256; + + server_name {{ app.vhost | default(client.name + '-' + app.name + '.wh.fws.fr') }} {{ app.aliases | join(' ') }}; + + root /usr/share/nginx/html; + +{% if app.maintenance %} + include /etc/nginx/ansible_conf.d/maintenance.inc; +{% endif %} + + # All client's vhost will use http-01 ACME challenges + include /etc/nginx/ansible_conf.d/acme.inc; + + # Ensure SSL is used + include /etc/nginx/ansible_conf.d/force_ssl.inc; + + location / { + limit_req zone=limit_req_std burst=200 nodelay; + limit_conn limit_conn_std 100; + + include /etc/nginx/ansible_conf.d/perf.inc; + + include /etc/nginx/ansible_conf.d/cache.inc; + +{% if app.proxy_custom_rewrites is defined %} +{{ app.proxy_custom_rewrites | indent(4,true) }} +{% endif %} + + # Send the original Host header to the backend + proxy_set_header Host "$host"; + + # Send info about the original request to the backend + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-Proto "$scheme"; + + # Handle websocket proxying + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $connection_upgrade; + proxy_http_version 1.1; + + # Hide some headers sent by the backend + proxy_hide_header X-Powered-By; + proxy_hide_header Cache-Control; + proxy_hide_header Pragma; + proxy_hide_header Expires; + + # Set 
the timeout to read responses from the backend + proxy_read_timeout {{ app.php.max_execution_time }}s; + + # Disable buffering large files + proxy_max_temp_file_size 5m; + + # Proxy requests to the backend + proxy_pass http://{{ app.backend | default(client.backend) | default(wh_defaults.backend) }}; + + # per vhost IP blacklist +{% for ip in app.deny_ip %} + deny {{ ip }}; +{% endfor %} + +{% if app.allow_ip | length > 0 %} + # per vhost IP restriction +{% for ip in app.allow_ip %} + allow {{ ip }}; +{% endfor %} + deny all; + +{% endif %} + } + + location = /RequestDenied { + return 403; + } +} + +{% endfor %} +{% endfor %} diff --git a/roles/unmaintained/wh_zimbra/handlers/main.yml b/roles/unmaintained/wh_zimbra/handlers/main.yml new file mode 100644 index 0000000..c91e905 --- /dev/null +++ b/roles/unmaintained/wh_zimbra/handlers/main.yml @@ -0,0 +1,3 @@ +--- +- name: start zmldapsync-wh + service: name=zmldapsync-wh state=started diff --git a/roles/unmaintained/wh_zimbra/meta/main.yml b/roles/unmaintained/wh_zimbra/meta/main.yml new file mode 100644 index 0000000..337c545 --- /dev/null +++ b/roles/unmaintained/wh_zimbra/meta/main.yml @@ -0,0 +1,4 @@ +--- +dependencies: + - role: wh_common + - role: zimbra diff --git a/roles/unmaintained/wh_zimbra/tasks/cas.yml b/roles/unmaintained/wh_zimbra/tasks/cas.yml new file mode 100644 index 0000000..0acd9b7 --- /dev/null +++ b/roles/unmaintained/wh_zimbra/tasks/cas.yml @@ -0,0 +1,116 @@ +--- +- name: Get or generate a pre authentication key + shell: | + KEY=$(/opt/zimbra/bin/zmprov getDomain {{ item }} zimbrapreauthkey | perl -ne '/^(?:zimbraP|p)reAuthKey: (.*)/ && print $1') + [ -z $KEY ] && KEY=$(/opt/zimbra/bin/zmprov generateDomainPreAuthKey {{ item }} | perl -ne '/^(?:zimbraP|p)reAuthKey: (.*)/ && print $1') + echo $KEY + become_user: zimbra + register: zcs_preauthkeys + changed_when: False + loop: "{{ wh_mail_domains }}" + tags: mail + +- name: Install preauth pages + template: 
src=../zimbra/templates/cas_preauth.jsp.j2 dest=/opt/zimbra/jetty/webapps/zimbra/public/preauth_{{ item.item }}.jsp owner=zimbra group=zimbra + loop: "{{ zcs_preauthkeys.results }}" + notify: restart zmmailboxd + tags: mail + +- name: Install admin preauth pages + template: src=../zimbra/templates/cas_preauth_admin.jsp.j2 dest=/opt/zimbra/jetty/webapps/zimbraAdmin/public/preauth_{{ item.item }}.jsp owner=zimbra group=zimbra + loop: "{{ zcs_preauthkeys.results }}" + notify: restart zmmailboxd + tags: mail + +- name: Configure CAS filters + blockinfile: + path: /opt/zimbra/jetty/etc/zimbra.web.xml.in + block: |2 + + + CasSingleSignOutFilter + org.jasig.cas.client.session.SingleSignOutFilter + + casServerUrlPrefix + https://sso-cl.fws.fr/cas + + + + + CasSingleSignOutFilter + /* + + + + org.jasig.cas.client.session.SingleSignOutHttpSessionListener + + + {% for item in wh_clients | default([]) %} + {% if item.mail is defined and item.mail.enabled is defined and item.mail.enabled and item.mail.domain is defined %} + + + CasAuthenticationFilter{{ item.name }} + org.jasig.cas.client.authentication.AuthenticationFilter + + casServerLoginUrl + https://sso-cl.fws.fr/cas/login + + + serverName + {{ item.mail.vhosts | first }} + + + + + CasAuthenticationFilter{{ item.name }} + /public/preauth_{{ item.mail.domain }}.jsp + + + + CasValidationFilter{{ item.name }} + org.jasig.cas.client.validation.Cas20ProxyReceivingTicketValidationFilter + + casServerUrlPrefix + https://sso-cl.fws.fr/cas + + + serverName + {{ item.mail.vhosts | first }} + + + redirectAfterValidation + true + + + + + CasValidationFilter{{ item.name }} + /* + + + {% else %} + + {% endif %} + {% endfor %} + + + CasHttpServletRequestWrapperFilter + org.jasig.cas.client.util.HttpServletRequestWrapperFilter + + + + CasHttpServletRequestWrapperFilter + /public/* + + + + + COOKIE + + marker: '' + insertafter: '' + validate: xmllint %s + notify: restart zmmailboxd + tags: zcs + diff --git 
a/roles/unmaintained/wh_zimbra/tasks/dkim.yml b/roles/unmaintained/wh_zimbra/tasks/dkim.yml new file mode 100644 index 0000000..e78d2c1 --- /dev/null +++ b/roles/unmaintained/wh_zimbra/tasks/dkim.yml @@ -0,0 +1,9 @@ +--- + +- name: Ensure every domain has a dkim key + shell: /opt/zimbra/libexec/zmdkimkeyutil -q -d {{ item }} || /opt/zimbra/libexec/zmdkimkeyutil -a -d {{ item }} + become_user: zimbra + loop: "{{ wh_mail_domains + wh_mail_aliases }}" + changed_when: False + tags: mail + diff --git a/roles/unmaintained/wh_zimbra/tasks/facts.yml b/roles/unmaintained/wh_zimbra/tasks/facts.yml new file mode 100644 index 0000000..d8e3a48 --- /dev/null +++ b/roles/unmaintained/wh_zimbra/tasks/facts.yml @@ -0,0 +1,5 @@ +--- + +- name: Build a list of Zimbra domains + set_fact: wh_mail_domains={{ wh_clients | selectattr('mail', 'defined') | selectattr('mail.enabled', 'equalto', True) | selectattr('mail.domain', 'defined') | map(attribute='mail.domain') | list }} + tags: mail diff --git a/roles/unmaintained/wh_zimbra/tasks/main.yml b/roles/unmaintained/wh_zimbra/tasks/main.yml new file mode 100644 index 0000000..747bdc6 --- /dev/null +++ b/roles/unmaintained/wh_zimbra/tasks/main.yml @@ -0,0 +1,9 @@ +--- + +- include_tasks: facts.yml + +- include_tasks: zmldapsync.yml + when: zcs_i_am_primary_ldap == True + +- include_tasks: cas.yml + when: "'mailbox' in zcs_enabled_components" diff --git a/roles/unmaintained/wh_zimbra/tasks/zmldapsync.yml b/roles/unmaintained/wh_zimbra/tasks/zmldapsync.yml new file mode 100644 index 0000000..f7845fb --- /dev/null +++ b/roles/unmaintained/wh_zimbra/tasks/zmldapsync.yml @@ -0,0 +1,22 @@ +--- +- name: Deploy LDAP sync configuration + template: src=zmldapsync-wh.yml.j2 dest=/opt/zimbra/conf/zmldapsync-wh.yml mode=600 + notify: start zmldapsync-wh + tags: mail + +- name: Deploy LDAP sync systemd units + template: src=zmldapsync-wh.{{ item }}.j2 dest=/etc/systemd/system/zmldapsync-wh.{{ item }} + loop: + - service + - timer + register: wh_zimbra_systemd_unit + tags:
mail + +- name: Reload systemd + systemd: daemon_reload=True + when: wh_zimbra_systemd_unit.results | selectattr('changed','equalto',True) | list | length > 0 + tags: mail + +- name: Enable LDAP sync services + systemd: name=zmldapsync-wh.timer state=started enabled=True + tags: mail diff --git a/roles/unmaintained/wh_zimbra/templates/zmldapsync-wh.service.j2 b/roles/unmaintained/wh_zimbra/templates/zmldapsync-wh.service.j2 new file mode 100644 index 0000000..1857059 --- /dev/null +++ b/roles/unmaintained/wh_zimbra/templates/zmldapsync-wh.service.j2 @@ -0,0 +1,7 @@ +[Unit] +Description=Sync LDAP accounts into Zimbra for hosted clients + +[Service] +Type=oneshot +ExecStart=/opt/zimbra/bin/zmldapsync --config /opt/zimbra/conf/zmldapsync-wh.yml +TimeoutSec=300 diff --git a/roles/unmaintained/wh_zimbra/templates/zmldapsync-wh.timer.j2 b/roles/unmaintained/wh_zimbra/templates/zmldapsync-wh.timer.j2 new file mode 100644 index 0000000..ca52b87 --- /dev/null +++ b/roles/unmaintained/wh_zimbra/templates/zmldapsync-wh.timer.j2 @@ -0,0 +1,8 @@ +[Unit] +Description=Sync LDAP Users with Zimbra for hosted clients + +[Timer] +OnCalendar=*:0/15 + +[Install] +WantedBy=timers.target diff --git a/roles/unmaintained/wh_zimbra/templates/zmldapsync-wh.yml.j2 b/roles/unmaintained/wh_zimbra/templates/zmldapsync-wh.yml.j2 new file mode 100644 index 0000000..c57ea7a --- /dev/null +++ b/roles/unmaintained/wh_zimbra/templates/zmldapsync-wh.yml.j2 @@ -0,0 +1,58 @@ +--- +general: + notify: + from: zimbra@{{ ansible_domain }} + to: dani@fws.fr + +domains: +{% for client in wh_clients | default([]) %} +{% if client.mail is defined and client.mail.enabled is defined and client.mail.enabled and client.mail.domain is defined %} + {{ client.mail.domain }}: + public_url: https://{{ client.mail.vhosts | first }} + admin_url: https://{{ client.mail.vhosts | first }}:9071/ + cas: + enabled: True + server_url: https://sso-cl.fws.fr/cas + ldap: + servers: + - ldap://dc3.fws.fr:389 + - ldap://dc1.fws.fr:389 + -
ldap://dc2.fws.fr:389 + schema: ad + bind_dn: CN=Zimbra,OU=Apps,DC=fws,DC=fr + bind_pass: {{ vault_zimbra_ldap_bind_pass | quote }} + users: + base: OU={{ client.name }},OU=Clients,DC=fws,DC=fr + filter: "(&(objectClass=user)(mail=*))" + groups: + base: OU={{ client.name }},OU=Clients,DC=fws,DC=fr + zimbra: + create_if_missing: True + setup_ldap_auth: True +{% if client.mail.domain_aliases is defined and client.mail.domain_aliases | length > 0 %} + domain_aliases: +{% for alias in client.mail.domain_aliases %} + - {{ alias }} +{% endfor %} +{% endif %} + additional_domain_attrs: +{% if client.mail.vhosts is defined and client.mail.vhosts | length > 0 %} + zimbraVirtualHostname: +{% for vhost in client.mail.vhosts %} + - {{ vhost }} +{% endfor %} + zimbraPublicServiceHostname: {{ client.mail.vhosts | first }} + zimbraAdminConsoleLoginURL: https://{{ client.mail.vhosts | first }}:9071//zimbraAdmin/public/preauth_{{ client.mail.domain }}.jsp + zimbraWebClientLoginURL: https://{{ client.mail.vhosts | first }}/public/preauth_{{ client.mail.domain }}.jsp +{% else %} + zimbraPublicServiceHostname: zm-cl.fws.fr + zimbraAdminConsoleLoginURL: https://zm-cl.fws.fr:9071//zimbraAdmin/public/preauth_{{ client.mail.domain }}.jsp + zimbraWebClientLoginURL: https://zm-cl.fws.fr/public/preauth_{{ client.mail.domain }}.jsp +{% endif %} + zimbraPublicServicePort: 443 + zimbraPublicServiceProtocol: https + zimbraAdminConsoleLogoutURL: https://sso-cl.fws.fr/cas/logout + zimbraWebClientLogoutURL: https://sso-cl.fws.fr/cas/logout + +{% endif %} +{% endfor %} diff --git a/roles/vaultwarden/tasks/main.yml b/roles/vaultwarden/tasks/main.yml index 549fbda..7fb809e 100644 --- a/roles/vaultwarden/tasks/main.yml +++ b/roles/vaultwarden/tasks/main.yml @@ -1,18 +1,29 @@ --- -- include: user.yml -- include: directories.yml -- include: facts.yml -- include: archive_pre.yml - when: vaultwarden_install_mode == 'upgrade' or vaultwarden_web_install_mode == 'upgrade' -- include: install.yml -- include:
conf.yml -- include: migrate_bitwarden_rs.yml - when: vaultwarden_migrate_from_bitwarden -- include: iptables.yml +- include_tasks: user.yml + tags: always +- include_tasks: directories.yml + tags: always +- include_tasks: facts.yml + tags: always +- include_tasks: archive_pre.yml + when: vaultwarden_install_mode | default('none') == 'upgrade' or vaultwarden_web_install_mode | default('none') == 'upgrade' +- include_tasks: install.yml + tags: always +- include_tasks: conf.yml + tags: always +- include_tasks: migrate_bitwarden_rs.yml + when: vaultwarden_migrate_from_bitwarden | default(False) + tags: always +- include_tasks: iptables.yml when: iptables_manage | default(True) -- include: service.yml -- include: write_version.yml -- include: archive_post.yml - when: vaultwarden_install_mode == 'upgrade' or vaultwarden_web_install_mode == 'upgrade' -- include: cleanup.yml + tags: always +- include_tasks: service.yml + tags: always +- include_tasks: write_version.yml + tags: always +- include_tasks: archive_post.yml + when: vaultwarden_install_mode | default('none') == 'upgrade' or vaultwarden_web_install_mode | default('none') == 'upgrade' + tags: always +- include_tasks: cleanup.yml + tags: always diff --git a/roles/wbo/tasks/main.yml b/roles/wbo/tasks/main.yml index ab1822c..6197a9a 100644 --- a/roles/wbo/tasks/main.yml +++ b/roles/wbo/tasks/main.yml @@ -1,8 +1,12 @@ --- -- include: user.yml -- include: install.yml -- include: iptables.yml +- include_tasks: user.yml + tags: always +- include_tasks: install.yml + tags: always +- include_tasks: iptables.yml when: iptables_manage | default(True) -- include: services.yml + tags: always +- include_tasks: services.yml + tags: always diff --git a/roles/wordpress/tasks/main.yml b/roles/wordpress/tasks/main.yml index 388bc21..6b5456c 100644 --- a/roles/wordpress/tasks/main.yml +++ b/roles/wordpress/tasks/main.yml @@ -1,11 +1,18 @@ --- -- include: user.yml -- include: directories.yml -- include: facts.yml -- include: 
archive_pre.yml - when: wp_install_mode == 'upgrade' -- include: conf.yml -- include: install.yml -- include: archive_post.yml - when: wp_install_mode == 'upgrade' +- include_tasks: user.yml + tags: always +- include_tasks: directories.yml + tags: always +- include_tasks: facts.yml + tags: always +- include_tasks: archive_pre.yml + when: wp_install_mode | default('none') == 'upgrade' + tags: always +- include_tasks: conf.yml + tags: always +- include_tasks: install.yml + tags: always +- include_tasks: archive_post.yml + when: wp_install_mode | default('none') == 'upgrade' + tags: always diff --git a/roles/zabbix_agent/tasks/iptables.yml b/roles/zabbix_agent/tasks/iptables.yml index 13cba98..2ccb008 100644 --- a/roles/zabbix_agent/tasks/iptables.yml +++ b/roles/zabbix_agent/tasks/iptables.yml @@ -4,5 +4,4 @@ name: zabbix_agent_port state: "{{ (zabbix_agent_src_ip | length > 0) | ternary('present', 'absent') }}" rules: "-A INPUT -m state --state NEW -p tcp --dport {{ zabbix_agent_port }} -s {{ zabbix_agent_src_ip | join(',') }} -j ACCEPT" - when: iptables_manage | default(True) tags: zabbix diff --git a/roles/zabbix_agent/tasks/main.yml b/roles/zabbix_agent/tasks/main.yml index f54746f..8a4bdad 100644 --- a/roles/zabbix_agent/tasks/main.yml +++ b/roles/zabbix_agent/tasks/main.yml @@ -1,12 +1,22 @@ --- -- include: facts.yml -- include: install.yml -- include: install_{{ ansible_os_family }}.yml -- include: selinux.yml +- include_tasks: facts.yml + tags: always +- include_tasks: install.yml + tags: always +- include_tasks: install_{{ ansible_os_family }}.yml + tags: always +- include_tasks: selinux.yml when: ansible_selinux.status == 'enabled' -- include: conf.yml -- include: psk.yml -- include: sensors.yml -- include: iptables.yml -- include: service.yml + tags: always +- include_tasks: conf.yml + tags: always +- include_tasks: psk.yml + tags: always +- include_tasks: sensors.yml + tags: always +- include_tasks: iptables.yml + when: iptables_manage | default(True) + 
tags: always +- include_tasks: service.yml + tags: always diff --git a/roles/zabbix_agent/tasks/sensors.yml b/roles/zabbix_agent/tasks/sensors.yml index 880c8ac..7f15979 100644 --- a/roles/zabbix_agent/tasks/sensors.yml +++ b/roles/zabbix_agent/tasks/sensors.yml @@ -1,5 +1,6 @@ --- -- include: sensors_{{ ansible_os_family }}.yml +- include_tasks: sensors_{{ ansible_os_family }}.yml + tags: always - name: Check if hardware sensors should be detected stat: path=/etc/zabbix/sensors.ini diff --git a/roles/zabbix_lld_all_graph/tasks/main.yml b/roles/zabbix_lld_all_graph/tasks/main.yml index ff27271..28b1998 100644 --- a/roles/zabbix_lld_all_graph/tasks/main.yml +++ b/roles/zabbix_lld_all_graph/tasks/main.yml @@ -1,5 +1,8 @@ --- -- include: install.yml -- include: conf.yml -- include: services.yml +- include_tasks: install.yml + tags: always +- include_tasks: conf.yml + tags: always +- include_tasks: services.yml + tags: always diff --git a/roles/zabbix_proxy/tasks/iptables.yml b/roles/zabbix_proxy/tasks/iptables.yml index eb62bfc..920577b 100644 --- a/roles/zabbix_proxy/tasks/iptables.yml +++ b/roles/zabbix_proxy/tasks/iptables.yml @@ -4,5 +4,4 @@ name: zabbix_proxy_port state: "{{ (zabbix_proxy_src_ip | length > 0) | ternary('present','absent') }}" rules: "-A INPUT -m state --state NEW -p tcp --dport {{ zabbix_proxy_port | default('10051') }} -s {{ zabbix_proxy_src_ip | join(',') }} -j ACCEPT" - when: iptables_manage | default(True) tags: zabbix diff --git a/roles/zabbix_proxy/tasks/main.yml b/roles/zabbix_proxy/tasks/main.yml index 9bcd639..d688918 100644 --- a/roles/zabbix_proxy/tasks/main.yml +++ b/roles/zabbix_proxy/tasks/main.yml @@ -1,12 +1,20 @@ --- -- include: install.yml -- include: directories.yml -- include: upgrade.yml -- include: psk.yml -- include: selinux.yml +- include_tasks: install.yml + tags: always +- include_tasks: directories.yml + tags: always +- include_tasks: upgrade.yml + tags: always +- include_tasks: psk.yml + tags: always +- include_tasks: 
selinux.yml when: ansible_selinux.status == 'enabled' -- include: conf.yml -- include: iptables.yml -- include: service.yml + tags: always +- include_tasks: conf.yml + tags: always +- include_tasks: iptables.yml + when: iptables_manage | default(True) +- include_tasks: service.yml + tags: always diff --git a/roles/zabbix_server/tasks/main.yml b/roles/zabbix_server/tasks/main.yml index 4e0ba55..3e05c6f 100644 --- a/roles/zabbix_server/tasks/main.yml +++ b/roles/zabbix_server/tasks/main.yml @@ -1,11 +1,18 @@ --- -- include: facts.yml -- include: install.yml -- include: directories.yml -- include: selinux.yml +- include_tasks: facts.yml + tags: always +- include_tasks: install.yml + tags: always +- include_tasks: directories.yml + tags: always +- include_tasks: selinux.yml when: ansible_selinux.status == 'enabled' -- include: conf.yml -- include: iptables.yml -- include: service.yml + tags: always +- include_tasks: conf.yml + tags: always +- include_tasks: iptables.yml + tags: always +- include_tasks: service.yml + tags: always diff --git a/roles/zfs/tasks/main.yml b/roles/zfs/tasks/main.yml index 8337fc7..eb6fc11 100644 --- a/roles/zfs/tasks/main.yml +++ b/roles/zfs/tasks/main.yml @@ -1,6 +1,7 @@ --- -- include: install_{{ ansible_os_family }}.yml +- include_tasks: install_{{ ansible_os_family }}.yml + tags: always - name: load ZFS modprobe: name=zfs