diff --git a/roles/common/meta/main.yml b/roles/common/meta/main.yml index 7ebb06d..b77f673 100644 --- a/roles/common/meta/main.yml +++ b/roles/common/meta/main.yml @@ -6,6 +6,7 @@ dependencies: - role: repo_base when: ansible_os_family == 'RedHat' - role: network + when: (net_hosts is defined and net_hosts | length > 0) or (net_if is defined and net_if | length > 0) - role: iptables when: iptables_manage | default(True) - role: zabbix_agent diff --git a/roles/lemonldap_ng/files/logos/hoppscotch.png b/roles/lemonldap_ng/files/logos/hoppscotch.png new file mode 100644 index 0000000..3df4fd4 Binary files /dev/null and b/roles/lemonldap_ng/files/logos/hoppscotch.png differ diff --git a/roles/lemonldap_ng/files/logos/rallly.png b/roles/lemonldap_ng/files/logos/rallly.png new file mode 100644 index 0000000..49fb96f Binary files /dev/null and b/roles/lemonldap_ng/files/logos/rallly.png differ diff --git a/roles/nomad_bin/defaults/main.yml b/roles/nomad_bin/defaults/main.yml index 585f9dd..7a0ee4a 100644 --- a/roles/nomad_bin/defaults/main.yml +++ b/roles/nomad_bin/defaults/main.yml @@ -1,9 +1,9 @@ --- # Version of Nomad to install -nomad_version: 1.8.2 +nomad_version: 1.8.3 # URL of the archive nomad_archive_url: https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_amd64.zip # Expected sha256 of the archive -nomad_archive_sha256: 75300292073874e4051b9c135009c827b120375d15846427cc4954fc0400d2d6 +nomad_archive_sha256: a0c92d427fe8839bf3aab9c62b2d12190483f953a3483c08891e53f65f676797 diff --git a/roles/ntp_client/defaults/main.yml b/roles/ntp_client/defaults/main.yml index 0ae8b2a..e5ad9cc 100644 --- a/roles/ntp_client/defaults/main.yml +++ b/roles/ntp_client/defaults/main.yml @@ -3,8 +3,8 @@ ntp_src_ip: [] ntp_port: 123 ntp_servers: - - 0.centos.pool.ntp.org - - 1.centos.pool.ntp.org - - 2.centos.pool.ntp.org - - 3.centos.pool.ntp.org + - 0.fr.pool.ntp.org + - 1.fr.pool.ntp.org + - 2.fr.pool.ntp.org + - 3.fr.pool.ntp.org ... 
diff --git a/roles/pbs/tasks/services.yml b/roles/pbs/tasks/services.yml index 332885a..9dcb3c5 100644 --- a/roles/pbs/tasks/services.yml +++ b/roles/pbs/tasks/services.yml @@ -5,5 +5,4 @@ loop: - proxmox-backup - proxmox-backup-proxy - - proxmox-backup-banner tags: pbs diff --git a/roles/penpot/defaults/main.yml b/roles/penpot/defaults/main.yml index 6890ded..cf44574 100644 --- a/roles/penpot/defaults/main.yml +++ b/roles/penpot/defaults/main.yml @@ -1,7 +1,7 @@ --- # Penpot version to deploy -penpot_version: 2.1.1 +penpot_version: 2.1.2 # SHould ansible manage upgrades. If False, only the initial install will be done penpot_manage_upgrade: True @@ -10,7 +10,7 @@ penpot_root_dir: /opt/penpot # URL of the archive penpot_archive_url: https://github.com/penpot/penpot/archive/refs/tags/{{ penpot_version }}.tar.gz # Expected sha256 of the archive -penpot_archive_sha256: 0912464158ab8cdbef384686886de95b3318d81105522c3eeb9d703fea87fbf0 +penpot_archive_sha256: a61b1e57256c8070906f6cda1aee27b9b484f9e83bfdfacfecca512c0850dd02 # User under which penpot will run. 
Will be created penpot_user: penpot diff --git a/roles/pve/defaults/main.yml b/roles/pve/defaults/main.yml index a1be50e..5da981e 100644 --- a/roles/pve/defaults/main.yml +++ b/roles/pve/defaults/main.yml @@ -1,10 +1,5 @@ --- -pve_dump_storage: local - -pve_mod_to_load: - - nf_conntrack_proto_gre - pve_ksm: True pve_ksm_sleep_msec: 5 pve_ksm_thres_coef: 35 diff --git a/roles/pve/tasks/facts.yml b/roles/pve/tasks/facts.yml deleted file mode 100644 index d13fa46..0000000 --- a/roles/pve/tasks/facts.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- - -# The module is unavailable in PVE >= 6 -# as it's built in the kernel -- name: Do not load nf_conntrack_proto_gre for PVE6 - set_fact: pve_mod_to_load={{ pve_mod_to_load | difference(['nf_conntrack_proto_gre']) }} - when: ansible_distribution_major_version | int >= 10 - tags: pve diff --git a/roles/pve/tasks/main.yml b/roles/pve/tasks/main.yml index 6d15ef2..b15b1e5 100644 --- a/roles/pve/tasks/main.yml +++ b/roles/pve/tasks/main.yml @@ -1,22 +1,13 @@ --- -- include_tasks: facts.yml - tags: always - - name: Install tools apt: name: - - pigz - ksm-control-daemon - - openvswitch-switch - ethtool - patch tags: pve -- name: Deploy vzdump config - template: src=vzdump.conf.j2 dest=/etc/vzdump.conf - tags: pve - - name: Deploy ksm configuration template: src=ksmtuned.conf.j2 dest=/etc/ksmtuned.conf notify: restart ksmtuned @@ -26,70 +17,6 @@ service: name=ksmtuned state={{ pve_ksm | ternary('started','stopped') }} enabled={{ pve_ksm | ternary(True,False) }} tags: pve -- name: Configure modules to load - copy: content={{ pve_mod_to_load | join("\n") }} dest=/etc/modules-load.d/firewall.conf - register: pve_modules - tags: pve - -- name: Load modules - service: name=systemd-modules-load state=restarted - when: pve_modules.changed - tags: pve - -- name: Check proxmox cluster status - command: pvesh get /cluster/status --output-format=json - register: pve_cluster_status_1 - ignore_errors: True - changed_when: False - tags: pve - -- name: 
Parse proxmox cluster status - set_fact: pve_cluster={{ pve_cluster_status_1.stdout | from_json }} - when: pve_cluster_status_1.rc == 0 - tags: pve - -- name: Check proxmox cluster status (old pvesh) - command: pvesh get /cluster/status - when: pve_cluster_status_1.rc != 0 - register: pve_cluster_status_2 - changed_when: False - tags: pve - -- name: Parse proxmox cluster status (old pvesh) - set_fact: pve_cluster={{ pve_cluster_status_2.stdout | from_json }} - when: pve_cluster_status_1.rc != 0 - tags: pve - -- name: Deploy the unlock_dev script - copy: src=unlock_dev dest=/usr/local/bin/unlock_dev mode=755 - tags: pve - -- name: Check if the old hookd daemon is installed - stat: path=/usr/local/bin/pve-hookd - register: pve_old_hookd - tags: pve - -- name: Stop the old hookd daemon - service: name=pve-hookd state=stopped - when: pve_old_hookd.stat.exists - tags: pve - -- name: Remove the old hook daemon - file: path={{ item }} state=absent - loop: - - /usr/local/bin/pve-hookd - - /etc/hooks - - /etc/systemd/system/pve-hookd.service - - /etc/tmpfiles.d/pve-container-hooks.conf - - /etc/systemd/system/pve-container@.service.d/pve-container-hooks.conf - - /var/run/lxc/active - tags: pve - -- name: Reload systemd - command: systemctl daemon-reload - when: pve_old_hookd.stat.exists - tags: pve - - include_tasks: pve_online.yml when: pve_online == True tags: always @@ -113,26 +40,6 @@ notify: restart pveproxy tags: pve -- name: Rise limits for containers - pam_limits: - domain: '*' - limit_type: "{{ item.type }}" - limit_item: nofile - value: "{{ item.value }}" - loop: - - type: soft - value: 65000 - - type: hard - value: 65535 - tags: pve - -- name: Rise inotify instances - sysctl: - name: fs.inotify.max_user_instances - value: 1024 - sysctl_file: /etc/sysctl.d/ansible.conf - tags: pve - - name: Ensure dehydrated hook dir exists file: path=/etc/dehydrated/hooks_deploy_cert.d/ state=directory tags: pve,ssl @@ -141,26 +48,6 @@ template: src=dehydrated_hook.sh.j2 
dest=/etc/dehydrated/hooks_deploy_cert.d/20pve.sh mode=755 tags: pve,ssl -# See https://bugzilla.proxmox.com/show_bug.cgi?id=2326 why -- name: Create corosync override directory - file: path=/etc/systemd/system/corosync.service.d/ state=directory - tags: pve - -- name: Setup corosync to be restarted in case of failure - copy: - content: | - [Service] - Restart=on-failure - RestartSec=1 - dest: /etc/systemd/system/corosync.service.d/ansible.conf - register: pve_corosync_unit - tags: pve - -- name: Reload systemd - systemd: daemon_reload=True - when: pve_corosync_unit.changed - tags: pve - - include_tasks: zabbix.yml tags: always diff --git a/roles/pve/tasks/zabbix.yml b/roles/pve/tasks/zabbix.yml index 44b98be..997a126 100644 --- a/roles/pve/tasks/zabbix.yml +++ b/roles/pve/tasks/zabbix.yml @@ -12,5 +12,5 @@ user: root job: "/var/lib/zabbix/bin/util_populate_pve_cache" minute: "*/5" - state: "{{ (pve_zabbix_cache and pve_zabbix_scripts.stat.exists) | ternary('present','absent') }}" + state: "{{ (pve_zabbix_scripts.stat.exists and pve_zabbix_cache) | ternary('present','absent') }}" tags: pve,zabbix diff --git a/roles/repo_zabbix/tasks/Debian.yml b/roles/repo_zabbix/tasks/Debian.yml index 9c99d8e..b654afc 100644 --- a/roles/repo_zabbix/tasks/Debian.yml +++ b/roles/repo_zabbix/tasks/Debian.yml @@ -18,4 +18,5 @@ # Zabbix plugin repo doesn't have bookworm yet, so use bullseye for now in this case repo: deb http://repo.zabbix.com/zabbix-agent2-plugins/1/{{ ansible_distribution | lower }} {{ (ansible_distribution_major_version is version('12', '>=')) | ternary('bullseye', ansible_distribution_release) }} main filename: zabbix + state: absent tags: repo,zabbix diff --git a/roles/squash_tm/defaults/main.yml b/roles/squash_tm/defaults/main.yml index b3c1926..730297a 100644 --- a/roles/squash_tm/defaults/main.yml +++ b/roles/squash_tm/defaults/main.yml @@ -1,11 +1,11 @@ --- # Version of Shash TM to install -squashtm_version: 7.1.0 +squashtm_version: 7.2.0 
# Archive URL which will be downloaded during install/upgrades squashtm_archive_url: https://nexus.squashtest.org/nexus/repository/public-releases/tm/core/squash-tm-distribution/{{ squashtm_version }}.RELEASE/squash-tm-{{ squashtm_version }}.RELEASE.tar.gz # Expected checksum of the archive -squashtm_archive_sha256: 2b61241782a38af287ca09f500bd90b3a4c6c06857129370e45fda6c3ad4e6f2 +squashtm_archive_sha256: 73f64a445f57cfca5e35fe93193261d5ef1fecba1132c8764adce3af1156b4ad # Should ansible handle upgrades ? (if False, only initial install and configuration will be done) squashtm_manage_upgrade: True