From 5f05c09d42a4e4651c1ebb9c72fc00ac958221be Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Martin=20M=C3=A1gr?=
Date: Tue, 3 Dec 2024 11:36:16 +0100
Subject: [PATCH 1/4] Move health check script deploy

Minor updates are broken due to the fact that health check scripts are
deployed in install.yml task lists while the update role is using
run.yml task lists

Resolves: OSPRH-12088
---
 roles/edpm_iscsid/tasks/healthchecks.yml      | 30 +++++++++++++++++++
 roles/edpm_iscsid/tasks/install.yml           | 10 -------
 roles/edpm_iscsid/tasks/run.yml               |  5 +++-
 .../tasks/healthchecks.yml                    | 30 +++++++++++++++++++
 roles/edpm_logrotate_crond/tasks/install.yml  | 19 ------------
 roles/edpm_logrotate_crond/tasks/run.yml      |  3 ++
 roles/edpm_multipathd/tasks/healthchecks.yml  | 30 +++++++++++++++++++
 roles/edpm_multipathd/tasks/install.yml       | 17 -----------
 roles/edpm_multipathd/tasks/run.yml           |  4 +++
 .../tasks/healthchecks.yml                    | 30 +++++++++++++++++++
 roles/edpm_neutron_metadata/tasks/install.yml | 11 -------
 roles/edpm_neutron_metadata/tasks/run.yml     |  4 +++
 roles/edpm_ovn/tasks/healthchecks.yml         | 30 +++++++++++++++++++
 roles/edpm_ovn/tasks/install.yml              | 12 --------
 roles/edpm_ovn/tasks/run.yml                  |  4 +++
 15 files changed, 169 insertions(+), 70 deletions(-)
 create mode 100644 roles/edpm_iscsid/tasks/healthchecks.yml
 create mode 100644 roles/edpm_logrotate_crond/tasks/healthchecks.yml
 create mode 100644 roles/edpm_multipathd/tasks/healthchecks.yml
 create mode 100644 roles/edpm_neutron_metadata/tasks/healthchecks.yml
 create mode 100644 roles/edpm_ovn/tasks/healthchecks.yml

diff --git a/roles/edpm_iscsid/tasks/healthchecks.yml b/roles/edpm_iscsid/tasks/healthchecks.yml
new file mode 100644
index 000000000..ca5222b45
--- /dev/null
+++ b/roles/edpm_iscsid/tasks/healthchecks.yml
@@ -0,0 +1,30 @@
+---
+
+- name: Gather user fact
+  ansible.builtin.setup:
+    gather_subset:
+      - "!all"
+      - "!min"
+      - "user"
+  when:
+    - ansible_user is undefined
+
+- name: Ensure base directory for health checks exists
+  become: true
+  ansible.builtin.file:
+    path: /var/lib/openstack/healthchecks
+    state: directory
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0755'
+
+- name: Deploy iscsi health check script
+  become: true
+  ansible.builtin.copy:
+    src: healthchecks/iscsid/
+    dest: "/var/lib/openstack/healthchecks/iscsid"
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0700'
diff --git a/roles/edpm_iscsid/tasks/install.yml b/roles/edpm_iscsid/tasks/install.yml
index 21ca4cc1c..c5d38fa45 100644
--- a/roles/edpm_iscsid/tasks/install.yml
+++ b/roles/edpm_iscsid/tasks/install.yml
@@ -30,7 +30,6 @@
         - {'path': /var/lib/iscsi, 'setype': container_file_t}
         - {'path': /var/lib/config-data, 'setype': container_file_t, 'selevel': s0, 'mode': '0755'}
         - {'path': /var/lib/config-data/ansible-generated/iscsid, 'setype': container_file_t, 'mode': '0755'}
-        - {'path': /var/lib/openstack/healthchecks, 'setype': container_file_t, 'mode': '0755'}
 
     - name: Stat /lib/systemd/system/iscsid.socket
       ansible.builtin.stat:
@@ -56,12 +55,3 @@
         - ansible_facts.services["iscsi.service"] is defined
         - ansible_facts.services["iscsi.service"]["status"] != "not-found"
         - ansible_facts.services["iscsi.service"]["status"] == "enabled"
-
-    - name: Deploy iscsi health check script
-      ansible.builtin.copy:
-        src: healthchecks/iscsid/
-        dest: "/var/lib/openstack/healthchecks/iscsid"
-        setype: container_file_t
-        owner: "{{ ansible_user | default(ansible_user_id) }}"
-        group: "{{ ansible_user | default(ansible_user_id) }}"
-        mode: '0700'
diff --git a/roles/edpm_iscsid/tasks/run.yml b/roles/edpm_iscsid/tasks/run.yml
index 60bdca503..7d3bddee3 100644
--- a/roles/edpm_iscsid/tasks/run.yml
+++ b/roles/edpm_iscsid/tasks/run.yml
@@ -19,6 +19,10 @@
     name: edpm_container_manage
     tasks_from: shutdown.yml
 
+- name: Update iscsi health check script
+  ansible.builtin.include_tasks:
+    file: healthchecks.yml
+
 - name: Manage iscsid containers
   ansible.builtin.include_role:
     name: edpm_container_standalone
@@ -52,7 +56,6 @@
     state: restarted
 
 - name: Remove iscsid container restart sentinel file
-  become: true
   ansible.builtin.file:
     path: /etc/iscsi/.iscsid_restart_required
     state: absent
diff --git a/roles/edpm_logrotate_crond/tasks/healthchecks.yml b/roles/edpm_logrotate_crond/tasks/healthchecks.yml
new file mode 100644
index 000000000..dc43a92ed
--- /dev/null
+++ b/roles/edpm_logrotate_crond/tasks/healthchecks.yml
@@ -0,0 +1,30 @@
+---
+
+- name: Gather user fact
+  ansible.builtin.setup:
+    gather_subset:
+      - "!all"
+      - "!min"
+      - "user"
+  when:
+    - ansible_user is undefined
+
+- name: Ensure base directory for health checks exists
+  become: true
+  ansible.builtin.file:
+    path: /var/lib/openstack/healthchecks
+    state: directory
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0755'
+
+- name: Deploy logrotate_crond health check script
+  become: true
+  ansible.builtin.copy:
+    src: healthchecks/logrotate_crond/
+    dest: /var/lib/openstack/healthchecks/logrotate_crond
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0700'
diff --git a/roles/edpm_logrotate_crond/tasks/install.yml b/roles/edpm_logrotate_crond/tasks/install.yml
index bea292b58..ef51fd58a 100644
--- a/roles/edpm_logrotate_crond/tasks/install.yml
+++ b/roles/edpm_logrotate_crond/tasks/install.yml
@@ -53,22 +53,3 @@
     - ansible_facts.selinux is defined
     - ansible_facts.selinux.status == "enabled"
 
-- name: Create a directory for container health checks
-  ansible.builtin.file:
-    path: /var/lib/openstack/healthchecks
-    state: directory
-    setype: container_file_t
-    owner: "{{ ansible_user | default(ansible_user_id) }}"
-    group: "{{ ansible_user | default(ansible_user_id) }}"
-    mode: '0755'
-  become: true
-
-- name: Deploy logrotate_crond health check script
-  ansible.builtin.copy:
-    src: healthchecks/logrotate_crond/
-    dest: /var/lib/openstack/healthchecks/logrotate_crond
-    setype: container_file_t
-    owner: "{{ ansible_user | default(ansible_user_id) }}"
-    group: "{{ ansible_user | default(ansible_user_id) }}"
-    mode: '0700'
-  become: true
diff --git a/roles/edpm_logrotate_crond/tasks/run.yml b/roles/edpm_logrotate_crond/tasks/run.yml
index a501b119a..923194e97 100644
--- a/roles/edpm_logrotate_crond/tasks/run.yml
+++ b/roles/edpm_logrotate_crond/tasks/run.yml
@@ -14,6 +14,9 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+- name: Update logrotate_crond health check script
+  ansible.builtin.include_tasks:
+    file: healthchecks.yml
 
 - name: Manage logrotate_crond containers
   ansible.builtin.include_role:
diff --git a/roles/edpm_multipathd/tasks/healthchecks.yml b/roles/edpm_multipathd/tasks/healthchecks.yml
new file mode 100644
index 000000000..66b51839b
--- /dev/null
+++ b/roles/edpm_multipathd/tasks/healthchecks.yml
@@ -0,0 +1,30 @@
+---
+
+- name: Gather user fact
+  ansible.builtin.setup:
+    gather_subset:
+      - "!all"
+      - "!min"
+      - "user"
+  when:
+    - ansible_user is undefined
+
+- name: Ensure base directory for health checks exists
+  become: true
+  ansible.builtin.file:
+    path: /var/lib/openstack/healthchecks
+    state: directory
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0755'
+
+- name: Deploy multipathd health check script
+  become: true
+  ansible.builtin.copy:
+    src: healthchecks/multipathd/
+    dest: /var/lib/openstack/healthchecks/multipathd
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0700'
diff --git a/roles/edpm_multipathd/tasks/install.yml b/roles/edpm_multipathd/tasks/install.yml
index 14e62b58e..c4214cf63 100644
--- a/roles/edpm_multipathd/tasks/install.yml
+++ b/roles/edpm_multipathd/tasks/install.yml
@@ -62,20 +62,3 @@
         mode: "0755"
         setype: container_file_t
 
-    - name: Create a directory for container health checks
-      ansible.builtin.file:
-        path: /var/lib/openstack/healthchecks
-        state: directory
-        setype: container_file_t
-        owner: "{{ ansible_user | default(ansible_user_id) }}"
-        group: "{{ ansible_user | default(ansible_user_id) }}"
-        mode: '0755'
-
-    - name: Deploy multipathd health check script
-      ansible.builtin.copy:
-        src: healthchecks/multipathd/
-        dest: /var/lib/openstack/healthchecks/multipathd
-        setype: container_file_t
-        owner: "{{ ansible_user | default(ansible_user_id) }}"
-        group: "{{ ansible_user | default(ansible_user_id) }}"
-        mode: '0700'
diff --git a/roles/edpm_multipathd/tasks/run.yml b/roles/edpm_multipathd/tasks/run.yml
index 2fb7ec5d5..42988f15a 100644
--- a/roles/edpm_multipathd/tasks/run.yml
+++ b/roles/edpm_multipathd/tasks/run.yml
@@ -19,6 +19,10 @@
     name: edpm_container_manage
     tasks_from: shutdown.yml
 
+- name: Update multipathd health check script
+  ansible.builtin.include_tasks:
+    file: healthchecks.yml
+
 - name: Manage multipathd containers
   ansible.builtin.include_role:
     name: edpm_container_standalone
diff --git a/roles/edpm_neutron_metadata/tasks/healthchecks.yml b/roles/edpm_neutron_metadata/tasks/healthchecks.yml
new file mode 100644
index 000000000..2390f05a8
--- /dev/null
+++ b/roles/edpm_neutron_metadata/tasks/healthchecks.yml
@@ -0,0 +1,30 @@
+---
+
+- name: Gather user fact
+  ansible.builtin.setup:
+    gather_subset:
+      - "!all"
+      - "!min"
+      - "user"
+  when:
+    - ansible_user is undefined
+
+- name: Ensure base directory for health checks exists
+  become: true
+  ansible.builtin.file:
+    path: /var/lib/openstack/healthchecks
+    state: directory
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0755'
+
+- name: Deploy ovn_metadata_agent health check script
+  become: true
+  ansible.builtin.copy:
+    src: healthchecks/ovn_metadata_agent/
+    dest: /var/lib/openstack/healthchecks/ovn_metadata_agent
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0700'
diff --git a/roles/edpm_neutron_metadata/tasks/install.yml b/roles/edpm_neutron_metadata/tasks/install.yml
index 086291012..8b063ef85 100644
--- a/roles/edpm_neutron_metadata/tasks/install.yml
+++ b/roles/edpm_neutron_metadata/tasks/install.yml
@@ -37,7 +37,6 @@
     - {'path': "{{ edpm_neutron_metadata_agent_lib_dir }}/kill_scripts", "mode": "0755"}
     - {'path': "{{ edpm_neutron_metadata_agent_lib_dir }}/ovn-metadata-proxy", "mode": "0755"}
     - {'path': "{{ edpm_neutron_metadata_agent_lib_dir }}/external/pids", "mode": "0755"}
-    - {'path': /var/lib/openstack/healthchecks, "mode": "0755"}
 
 - name: Gather SELinux fact if needed
   when:
@@ -75,13 +74,3 @@
     mode: "0755"
   with_items:
     - {"src": "wrappers/kill-script.j2", "dest": "haproxy-kill"}
-
-- name: Deploy ovn_metadata_agent health check script
-  ansible.builtin.copy:
-    src: healthchecks/ovn_metadata_agent/
-    dest: /var/lib/openstack/healthchecks/ovn_metadata_agent
-    setype: container_file_t
-    owner: "{{ ansible_user | default(ansible_user_id) }}"
-    group: "{{ ansible_user | default(ansible_user_id) }}"
-    mode: '0700'
-  become: true
diff --git a/roles/edpm_neutron_metadata/tasks/run.yml b/roles/edpm_neutron_metadata/tasks/run.yml
index f2e0ac6f9..bc46b251d 100644
--- a/roles/edpm_neutron_metadata/tasks/run.yml
+++ b/roles/edpm_neutron_metadata/tasks/run.yml
@@ -32,6 +32,10 @@
     name: edpm_container_manage
     tasks_from: shutdown.yml
 
+- name: Update ovn_metadata_agent health check script
+  ansible.builtin.include_tasks:
+    file: healthchecks.yml
+
 - name: Run ovn_metadata_agent container
   ansible.builtin.include_role:
     name: osp.edpm.edpm_container_standalone
diff --git a/roles/edpm_ovn/tasks/healthchecks.yml b/roles/edpm_ovn/tasks/healthchecks.yml
new file mode 100644
index 000000000..2af7fc88f
--- /dev/null
+++ b/roles/edpm_ovn/tasks/healthchecks.yml
@@ -0,0 +1,30 @@
+---
+
+- name: Gather user fact
+  ansible.builtin.setup:
+    gather_subset:
+      - "!all"
+      - "!min"
+      - "user"
+  when:
+    - ansible_user is undefined
+
+- name: Ensure base directory for health checks exists
+  become: true
+  ansible.builtin.file:
+    path: /var/lib/openstack/healthchecks
+    state: directory
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0755'
+
+- name: Deploy ovn_controller health check script
+  become: true
+  ansible.builtin.copy:
+    src: healthchecks/ovn_controller/
+    dest: /var/lib/openstack/healthchecks/ovn_controller
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0700'
\ No newline at end of file
diff --git a/roles/edpm_ovn/tasks/install.yml b/roles/edpm_ovn/tasks/install.yml
index 1e60be907..1a0b22706 100644
--- a/roles/edpm_ovn/tasks/install.yml
+++ b/roles/edpm_ovn/tasks/install.yml
@@ -34,8 +34,6 @@
   loop:
     - {'path': /var/lib/edpm-config/firewall, 'mode': '0750'}
     - {'path': /var/lib/openvswitch/ovn, "owner": "openvswitch", "group": "openvswitch"}
-    - {'path': /var/lib/openstack/healthchecks, 'mode': '0755'}
-
 
 - name: Gather SELinux fact if needed
   when:
@@ -55,13 +53,3 @@
   when:
     - ansible_facts.selinux is defined
     - ansible_facts.selinux.status == "enabled"
-
-- name: Deploy ovn_controller health check script
-  ansible.builtin.copy:
-    src: healthchecks/ovn_controller/
-    dest: /var/lib/openstack/healthchecks/ovn_controller
-    setype: container_file_t
-    owner: "{{ ansible_user | default(ansible_user_id) }}"
-    group: "{{ ansible_user | default(ansible_user_id) }}"
-    mode: '0700'
-  become: true
diff --git a/roles/edpm_ovn/tasks/run.yml b/roles/edpm_ovn/tasks/run.yml
index 59a417631..2571db040 100644
--- a/roles/edpm_ovn/tasks/run.yml
+++ b/roles/edpm_ovn/tasks/run.yml
@@ -32,6 +32,10 @@
     name: edpm_container_manage
     tasks_from: shutdown.yml
 
+- name: Update ovn_controller health check script
+  ansible.builtin.include_tasks:
+    file: healthchecks.yml
+
 - name: Run ovn_controller container
   ansible.builtin.include_role:
     name: osp.edpm.edpm_container_standalone

From b0b47e5cde25fffabab68e1b78cf361e2fc015b6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Martin=20M=C3=A1gr?=
Date: Wed, 4 Dec 2024 11:51:27 +0100
Subject: [PATCH 2/4] Disable podman_exporter deployment by default

We need to disable podman_exporter deployment by default until we get
the exporter image downstream.

Resolves: OSPRH-12089
---
 roles/edpm_telemetry/defaults/main.yml       |  4 ++
 roles/edpm_telemetry/meta/argument_specs.yml |  4 ++
 roles/edpm_telemetry/tasks/exporter.yml      | 26 +++++++
 roles/edpm_telemetry/tasks/install.yml       | 73 ++++----------------
 4 files changed, 46 insertions(+), 61 deletions(-)
 create mode 100644 roles/edpm_telemetry/tasks/exporter.yml

diff --git a/roles/edpm_telemetry/defaults/main.yml b/roles/edpm_telemetry/defaults/main.yml
index 6fd3e2818..b8400555d 100644
--- a/roles/edpm_telemetry/defaults/main.yml
+++ b/roles/edpm_telemetry/defaults/main.yml
@@ -49,3 +49,7 @@ edpm_telemetry_healthcheck_sources:
 #  kepler: exporter
 # If telemetry services should have health checks enabled
 edpm_telemetry_healthcheck: true
+# List of ceilometer agents to be stopped during EDPM adoption
+edpm_telemetry_enabled_exporters:
+  - ceilometer_agent_compute
+  - node_exporter
\ No newline at end of file
diff --git a/roles/edpm_telemetry/meta/argument_specs.yml b/roles/edpm_telemetry/meta/argument_specs.yml
index f55c8ac7a..70b73c6fd 100644
--- a/roles/edpm_telemetry/meta/argument_specs.yml
+++ b/roles/edpm_telemetry/meta/argument_specs.yml
@@ -58,3 +58,7 @@
         Contains information about distribution of container health check scripts.
         Keys state for container names and value is name of a script directory
         from module's files directory.
+    edpm_telemetry_enabled_exporters:
+      type: list
+      required: true
+      description: "List of exporters to be deployed in the compute node"
diff --git a/roles/edpm_telemetry/tasks/exporter.yml b/roles/edpm_telemetry/tasks/exporter.yml
new file mode 100644
index 000000000..b92ecea5a
--- /dev/null
+++ b/roles/edpm_telemetry/tasks/exporter.yml
@@ -0,0 +1,26 @@
+---
+
+- name: Deploy health check script
+  ansible.builtin.copy:
+    src: "healthchecks/{{ edpm_telemetry_healthcheck_sources[exporter] }}/"
+    dest: "/var/lib/openstack/healthchecks/{{ exporter }}"
+    setype: container_file_t
+    owner: "{{ ansible_user | default(ansible_user_id) }}"
+    group: "{{ ansible_user | default(ansible_user_id) }}"
+    mode: '0700'
+  become: true
+
+- name: Deploy exporter container
+  ansible.builtin.include_role:
+    name: osp.edpm.edpm_container_manage
+  vars:
+    edpm_container_manage_config: "{{ edpm_telemetry_config_dest }}"
+    edpm_container_manage_healthcheck_disabled: true
+    edpm_container_manage_config_patterns: "{{ exporter }}.json"
+    edpm_container_manage_clean_orphans: false
+
+- name: Restart exporter container
+  become: true
+  ansible.builtin.systemd:
+    state: restarted
+    name: "edpm_{{ exporter }}.service"
\ No newline at end of file
diff --git a/roles/edpm_telemetry/tasks/install.yml b/roles/edpm_telemetry/tasks/install.yml
index e30125be8..500c9f790 100644
--- a/roles/edpm_telemetry/tasks/install.yml
+++ b/roles/edpm_telemetry/tasks/install.yml
@@ -24,69 +24,20 @@
     mode: '0755'
   become: true
 
-- name: Deploy telemetry health check scripts
-  ansible.builtin.copy:
-    src: "healthchecks/{{ item.value }}/"
-    dest: "/var/lib/openstack/healthchecks/{{ item.key }}"
-    setype: container_file_t
-    owner: "{{ ansible_user | default(ansible_user_id) }}"
-    group: "{{ ansible_user | default(ansible_user_id) }}"
-    mode: '0700'
-  become: true
-  loop: "{{ edpm_telemetry_healthcheck_sources | dict2items }}"
-
-- name: Deploy ceilometer containers
-  ansible.builtin.include_role:
-    name: osp.edpm.edpm_container_manage
-  vars:
-    edpm_container_manage_config: "{{ edpm_telemetry_config_dest }}"
-    edpm_container_manage_healthcheck_disabled: true
-    edpm_container_manage_config_patterns: "ceilometer_agent_compute.json"
-    edpm_container_manage_clean_orphans: false
+- name: Start podman.socket service
+  ansible.builtin.systemd_service:
+    name: podman.socket
+    state: started
+    enabled: true
+  become: true
+  when: '"podman_exporter" in edpm_telemetry_enabled_exporters'
 
-- name: Deploy node_exporter container
-  ansible.builtin.include_role:
-    name: osp.edpm.edpm_container_manage
-  vars:
-    edpm_container_manage_config: "{{ edpm_telemetry_config_dest }}"
-    edpm_container_manage_healthcheck_disabled: true
-    edpm_container_manage_config_patterns: "node_exporter.json"
-    edpm_container_manage_clean_orphans: false
-
-- name: Deploy podman_exporter container
-  block:
-    - name: Start podman.socket service
-      ansible.builtin.systemd_service:
-        name: podman.socket
-        state: started
-        enabled: true
-      become: true
-    - name: Start podman_exporter container
-      ansible.builtin.include_role:
-        name: osp.edpm.edpm_container_manage
-      vars:
-        edpm_container_manage_config: "{{ edpm_telemetry_config_dest }}"
-        edpm_container_manage_healthcheck_disabled: true
-        edpm_container_manage_config_patterns: "podman_exporter.json"
-        edpm_container_manage_clean_orphans: false
-
-- name: Restart node_exporter
-  become: true
-  ansible.builtin.systemd:
-    state: restarted
-    name: edpm_node_exporter.service
-
-- name: Restart podman_exporter
-  become: true
-  ansible.builtin.systemd:
-    state: restarted
-    name: edpm_podman_exporter.service
-
-- name: Restart ceilometer compute
-  become: true
-  ansible.builtin.systemd:
-    state: restarted
-    name: edpm_ceilometer_agent_compute.service
+- name: Deploy enabled exporters
+  ansible.builtin.include_tasks:
+    file: exporter.yml
+  loop: "{{ edpm_telemetry_enabled_exporters }}"
+  loop_control:
+    loop_var: exporter
 
 - name: List deployed health check scripts
   ansible.builtin.find:

From af0c35d57abd2332ab49f1dec60bcbf3f573c63e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Martin=20M=C3=A1gr?=
Date: Wed, 4 Dec 2024 22:44:58 +0100
Subject: [PATCH 3/4] Fixed issues from code review

---
 roles/edpm_iscsid/tasks/run.yml         | 1 +
 roles/edpm_ovn/tasks/healthchecks.yml   | 2 +-
 roles/edpm_telemetry/defaults/main.yml  | 4 ++--
 roles/edpm_telemetry/tasks/exporter.yml | 2 +-
 4 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/roles/edpm_iscsid/tasks/run.yml b/roles/edpm_iscsid/tasks/run.yml
index 7d3bddee3..4ad60cbe1 100644
--- a/roles/edpm_iscsid/tasks/run.yml
+++ b/roles/edpm_iscsid/tasks/run.yml
@@ -56,6 +56,7 @@
     state: restarted
 
 - name: Remove iscsid container restart sentinel file
+  become: true
   ansible.builtin.file:
     path: /etc/iscsi/.iscsid_restart_required
     state: absent
diff --git a/roles/edpm_ovn/tasks/healthchecks.yml b/roles/edpm_ovn/tasks/healthchecks.yml
index 2af7fc88f..35386c1b3 100644
--- a/roles/edpm_ovn/tasks/healthchecks.yml
+++ b/roles/edpm_ovn/tasks/healthchecks.yml
@@ -27,4 +27,4 @@
     setype: container_file_t
     owner: "{{ ansible_user | default(ansible_user_id) }}"
     group: "{{ ansible_user | default(ansible_user_id) }}"
-    mode: '0700'
\ No newline at end of file
+    mode: '0700'
diff --git a/roles/edpm_telemetry/defaults/main.yml b/roles/edpm_telemetry/defaults/main.yml
index b8400555d..73615230f 100644
--- a/roles/edpm_telemetry/defaults/main.yml
+++ b/roles/edpm_telemetry/defaults/main.yml
@@ -49,7 +49,7 @@ edpm_telemetry_healthcheck_sources:
 #  kepler: exporter
 # If telemetry services should have health checks enabled
 edpm_telemetry_healthcheck: true
-# List of ceilometer agents to be stopped during EDPM adoption
+# List of exporters to be deployed in the compute node
 edpm_telemetry_enabled_exporters:
   - ceilometer_agent_compute
-  - node_exporter
\ No newline at end of file
+  - node_exporter
diff --git a/roles/edpm_telemetry/tasks/exporter.yml b/roles/edpm_telemetry/tasks/exporter.yml
index b92ecea5a..d125d2718 100644
--- a/roles/edpm_telemetry/tasks/exporter.yml
+++ b/roles/edpm_telemetry/tasks/exporter.yml
@@ -23,4 +23,4 @@
   become: true
   ansible.builtin.systemd:
     state: restarted
-    name: "edpm_{{ exporter }}.service"
\ No newline at end of file
+    name: "edpm_{{ exporter }}.service"

From 60c6b7361ae3d9d905c398d40616c099e0a3ab20 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Martin=20M=C3=A1gr?=
Date: Wed, 4 Dec 2024 23:21:01 +0100
Subject: [PATCH 4/4] Fixed pre-commit issues

---
 roles/edpm_logrotate_crond/tasks/install.yml | 1 -
 roles/edpm_multipathd/tasks/install.yml      | 1 -
 2 files changed, 2 deletions(-)

diff --git a/roles/edpm_logrotate_crond/tasks/install.yml b/roles/edpm_logrotate_crond/tasks/install.yml
index ef51fd58a..1990cc994 100644
--- a/roles/edpm_logrotate_crond/tasks/install.yml
+++ b/roles/edpm_logrotate_crond/tasks/install.yml
@@ -52,4 +52,3 @@
   when:
     - ansible_facts.selinux is defined
     - ansible_facts.selinux.status == "enabled"
-
diff --git a/roles/edpm_multipathd/tasks/install.yml b/roles/edpm_multipathd/tasks/install.yml
index c4214cf63..6ccca3698 100644
--- a/roles/edpm_multipathd/tasks/install.yml
+++ b/roles/edpm_multipathd/tasks/install.yml
@@ -61,4 +61,3 @@
         state: directory
         mode: "0755"
         setype: container_file_t
-