From 0efe5c6f03d09eb06fa1cf7bbe42eb0fa198f09a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 26 Oct 2023 14:45:25 +0100 Subject: [PATCH 001/174] feat: first vd tests scan structure --- .../wazuh_testing/tools/system.py | 100 ++++++++- .../e2e_vulnerability_detector.yaml | 24 ++ .../vulnerability_detector_tests.yaml | 32 +++ tests/end_to_end/conftest.py | 4 +- .../test_vulnerability_detector/cases.yaml | 17 ++ .../data/configurations/agent.yaml | 24 ++ .../data/configurations/manager.yaml | 79 +++++++ .../test_vulnerability_detector/playbook.yaml | 9 + .../test_vulnerability_detector/test_scans.py | 211 ++++++++++++++++++ .../environment_test_dictionary.json | 3 + 10 files changed, 497 insertions(+), 6 deletions(-) create mode 100644 provisioning/environments/e2e_vulnerability_detector.yaml create mode 100644 provisioning/environments/vulnerability_detector_tests.yaml create mode 100644 tests/end_to_end/test_vulnerability_detector/cases.yaml create mode 100644 tests/end_to_end/test_vulnerability_detector/data/configurations/agent.yaml create mode 100644 tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml create mode 100644 tests/end_to_end/test_vulnerability_detector/playbook.yaml create mode 100644 tests/end_to_end/test_vulnerability_detector/test_scans.py diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index e8e2c4dec3..e699948110 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -6,13 +6,15 @@ import tempfile import xml.dom.minidom as minidom from typing import Union - +import base64 import testinfra import yaml from wazuh_testing.tools import WAZUH_CONF, WAZUH_API_CONF, API_LOG_FILE_PATH, WAZUH_LOCAL_INTERNAL_OPTIONS from wazuh_testing.tools.configuration import set_section_wazuh_conf - +from ansible.inventory.manager import InventoryManager +from 
ansible.parsing.dataloader import DataLoader +from ansible.vars.manager import VariableManager class HostManager: """This class is an extensible remote host management interface. Within this we have multiple functions to modify @@ -32,6 +34,17 @@ def __init__(self, inventory_path: str): except (OSError, yaml.YAMLError) as inventory_err: raise ValueError(f"Could not open/load Ansible inventory '{self.inventory_path}': {inventory_err}") + + data_loader = DataLoader() + self.inventory_manager = InventoryManager(loader=data_loader, sources=inventory_path) + self.hosts_variables = {} + + variable_manager = VariableManager(loader=data_loader, inventory=self.inventory_manager) + + for host in self.inventory_manager.get_hosts(): + self.hosts_variables[host] = variable_manager.get_vars(host=self.inventory_manager.get_host(str(host))) + + def get_inventory(self) -> dict: """Get the loaded Ansible inventory. @@ -40,6 +53,34 @@ def get_inventory(self) -> dict: """ return self.inventory + + def get_group_hosts(self, pattern=None): + """Get all hosts from inventory that belong to a group. + + Args: + group (str): Group name + + Returns: + list: List of hosts + """ + if pattern: + return [str(host) for host in self.inventory_manager.get_hosts(pattern=pattern)] + else: + return [str(host) for host in self.inventory_manager.get_hosts()] + + def get_host_variables(self, host): + """Get the variables of the specified host. + + Args: + host (str): Hostname + + Returns: + testinfra.modules.base.Ansible: Host instance from hostspec + """ + inventory_manager_host = self.inventory_manager.get_host(host) + + return self.hosts_variables[inventory_manager_host] + def get_host(self, host: str): """Get the Ansible object for communicating with the specified host. @@ -61,7 +102,7 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied. 
""" self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0775", - check=check) + check=check, become=True) def add_block_to_file(self, host: str, path: str, replace: str, before: str, after, check: bool = False): """Add text block to desired file. @@ -136,7 +177,12 @@ def get_file_content(self, host: str, file_path: str): host (str): Hostname file_path (str) : Path of the file """ - return self.get_host(host).file(file_path).content_string + + # return self.get_host(host).file(file_path).content_string + testinfra_host = self.get_host(host) + result = testinfra_host.ansible("slurp", f"src='{file_path}'", check=False) + return base64.b64decode(result['content']).decode('utf-8') + def apply_config(self, config_yml_path: str, dest_path: str = WAZUH_CONF, clear_files: list = None, restart_services: list = None): @@ -338,6 +384,52 @@ def configure_local_internal_options(self, local_internal_options: dict): replace = replace + internal_option self.modify_file_content(target_host, WAZUH_LOCAL_INTERNAL_OPTIONS, replace) + def download_file(self, host, url, dest_path, mode='755'): + """ + - name: Download foo.conf + ansible.builtin.get_url: + url: http://example.com/path/file.conf + dest: /etc/foo.conf + mode: '0440' + """ + a = self.get_host(host).ansible("get_url", f"url={url} dest={dest_path} mode={mode}", check=False) + return a + + def install_package(self, host, url, package_manager): + result = False + if package_manager == 'apt': + a = self.get_host(host).ansible("apt", f"deb={url}", check=False) + if a['changed'] == True and a['stderr'] == '': + result = True + elif package_manager == 'yum': + a = self.get_host(host).ansible("yum", f"name={url} state=present sslverify=false disable_gpg_check=True", check=False) + if 'rc' in a and a['rc'] == 0 and a['changed'] == True: + result = True + + def remove_package(self, host, package_name, package_manager): + result = False + if package_manager == 'apt': + a = 
self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) + if a['changed'] == True and a['stderr'] == '': + result = True + elif package_manager == 'yum': + a = self.get_host(host).ansible("yum", f"name={package_name} state=absent", check=False) + if 'rc' in a and a['rc'] == 0 and a['changed'] == True: + result = True + return result + + def handle_wazuh_services(self, host, operation): + os = self.get_host_variables(host)['os_name'] + binary_path = None + if os == 'windows': + self.get_host(host).ansible('ansible.windows.win_command', f"cmd=NET {operation} Wazuh", check=False) + else: + if os == 'linux': + binary_path = f"/var/ossec/bin/wazuh-control" + elif os == 'macos': + binary_path = f"/Library/Ossec/bin/wazuh-control" + self.get_host(host).ansible('ansible.builtin.command', f'cmd="{binary_path} {operation}"', check=False) + def clean_environment(host_manager, target_files): """Clears a series of files on target hosts managed by a host manager diff --git a/provisioning/environments/e2e_vulnerability_detector.yaml b/provisioning/environments/e2e_vulnerability_detector.yaml new file mode 100644 index 0000000000..a4447c0618 --- /dev/null +++ b/provisioning/environments/e2e_vulnerability_detector.yaml @@ -0,0 +1,24 @@ +manager1: + roles: [manager, filebeat, indexer] + os: ubuntu_22 + type: master + +manager2: + roles: [manager, filebeat] + os: ubuntu_22 + type: worker + +agent1: + roles: [agent] + os: centos_7 + manager: manager1 + +agent2: + roles: [agent] + os: windows_11 + manager: manager2 + +agent3: + roles: [agent] + os: ubuntu_22 + manager: manager1 diff --git a/provisioning/environments/vulnerability_detector_tests.yaml b/provisioning/environments/vulnerability_detector_tests.yaml new file mode 100644 index 0000000000..b236164db9 --- /dev/null +++ b/provisioning/environments/vulnerability_detector_tests.yaml @@ -0,0 +1,32 @@ +manager1: + roles: [manager, filebeat, indexer] + os: ubuntu_22 + type: master +resources: + cpu: 4 + 
memory: 8192 + +manager2: + roles: [manager, filebeat] + os: ubuntu_22 + type: worker + +agent1: + roles: [agent] + os: centos_7 + manager: manager1 + +agent2: + roles: [agent] + os: windows_11 + manager: manager1 + +agent3: + roles: [agent] + os: macos_1015 + manager: manager2 + +agent4: + roles: [agent] + os: ubuntu_22 + manager: manager2 diff --git a/tests/end_to_end/conftest.py b/tests/end_to_end/conftest.py index 32a4e450e2..d9dd577694 100644 --- a/tests/end_to_end/conftest.py +++ b/tests/end_to_end/conftest.py @@ -72,7 +72,7 @@ def validate_inventory(inventory_path, target_hosts): f"Read the {readme_file} file inside the E2E suite to build a valid inventory.") -@pytest.fixture(scope='session', autouse=True) +@pytest.fixture(scope='session') def validate_environments(request): """Fixture with session scope to validate the environments before run the E2E tests. @@ -155,7 +155,7 @@ def validate_environments(request): # -------------------------------------------------- End of Step 4 ------------------------------------------------- -@pytest.fixture(scope='module', autouse=True) +@pytest.fixture(scope='module', autouse=False) def run_specific_validations(request): """Fixture with module scope to validate the environment of an specific tests with specific validation tasks. 
diff --git a/tests/end_to_end/test_vulnerability_detector/cases.yaml b/tests/end_to_end/test_vulnerability_detector/cases.yaml new file mode 100644 index 0000000000..cd8b6a1ffc --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/cases.yaml @@ -0,0 +1,17 @@ +- case: "Installation of a vulnerable package" + id: "install_package" + description: "Installation of a vulnerable package" + preconditions: null + body: + - install_package: + package: + centos: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm + ubuntu: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb + - check_alerts: + centos: '.*installed.*.*agent\".*\"name\":\"\":\"rclone\",\"arch\":\"amd64\",\"version\":\"1.49.5\"' + ubuntu: '.*New dpkg \\(Debian Package\\) installed.*.*agent\".*\"name\":\".*package\":\"rclone\",\"arch\":\"amd64\",\"version\":\"1.49.5\"' + teardown: + - remove_package: + package: + centos: rclone + ubuntu: rclone diff --git a/tests/end_to_end/test_vulnerability_detector/data/configurations/agent.yaml b/tests/end_to_end/test_vulnerability_detector/data/configurations/agent.yaml new file mode 100644 index 0000000000..32edc6424e --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/data/configurations/agent.yaml @@ -0,0 +1,24 @@ +- sections: + - section: sca + elements: + - enabled: + value: 'no' + + - section: rootcheck + elements: + - disabled: + value: 'yes' + + - section: syscheck + elements: + - disabled: + value: 'no' + + - section: wodle + attributes: + - name: 'syscollector' + elements: + - disabled: + value: 'no' + - interval: + value: '1m' diff --git a/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml new file mode 100644 index 0000000000..27a2b3bbb9 --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml @@ -0,0 +1,79 @@ +- sections: + - section: vulnerability-detector + 
elements: + - enabled: + value: 'yes' + - run_on_start: + value: 'yes' + # - provider: + # attributes: + # - name: 'redhat' + # elements: + # - enabled: + # value: 'yes' + # - os: + # attributes: + # - path: CUSTOM_REDHAT_OVAL_FEED + # value: OS + # - path: + # value: CUSTOM_REDHAT_JSON_FEED + - provider: + attributes: + - name: 'canonical' + elements: + - enabled: + value: 'no' + - provider: + attributes: + - name: 'debian' + elements: + - enabled: + value: 'no' + - provider: + attributes: + - name: 'msu' + elements: + - enabled: + value: 'no' + - provider: + attributes: + - name: 'alas' + elements: + - enabled: + value: 'no' + - provider: + attributes: + - name: 'arch' + elements: + - enabled: + value: 'no' + # - provider: + # attributes: + # - name: 'nvd' + # elements: + # - enabled: + # value: 'yes' + # - path: + # value: CUSTOM_NVD_JSON_FEED + + - section: sca + elements: + - enabled: + value: 'no' + + - section: rootcheck + elements: + - disabled: + value: 'yes' + + - section: syscheck + elements: + - disabled: + value: 'yes' + + - section: wodle + attributes: + - name: 'syscollector' + elements: + - disabled: + value: 'no' diff --git a/tests/end_to_end/test_vulnerability_detector/playbook.yaml b/tests/end_to_end/test_vulnerability_detector/playbook.yaml new file mode 100644 index 0000000000..a4296201b4 --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/playbook.yaml @@ -0,0 +1,9 @@ +--- +- name: Basic Windows Playbook + hosts: agent2 + tasks: + - name: Create a text file + win_copy: + src: /tmp/example.txt + dest: C:\textfile.txt + diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py new file mode 100644 index 0000000000..e775e9dfb5 --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -0,0 +1,211 @@ +import pytest +import os +import pytest +import os +import subprocess +import argparse +import ansible_runner +import base64 +import re 
+from multiprocessing.pool import ThreadPool +from wazuh_testing.tools.configuration import ( + load_configuration_template, set_section_wazuh_conf +) +import xml.dom.minidom +import yaml +import tempfile + +from wazuh_testing.tools.system import HostManager +from ansible.inventory.manager import InventoryManager +from ansible.parsing.dataloader import DataLoader +from wazuh_testing.tools.monitoring import HostMonitor + + +current_dir = os.path.dirname(__file__) +configurations_dir = os.path.join(current_dir, "data", "configurations") +cases = {} + +with open(os.path.join(current_dir, 'cases.yaml'), 'r') as cases_file: + cases = yaml.load(cases_file, Loader=yaml.FullLoader) + + + +packages_manager = { + 'centos': 'yum', + 'ubuntu': 'apt' +} + +configurations_paths = { + 'manager': os.path.join(configurations_dir, 'manager.yaml'), + 'agent': os.path.join(configurations_dir, 'agent.yaml') +} + +configuration_filepath_os = { + 'linux': '/var/ossec/etc/ossec.conf', + 'windows': 'C:\Program Files (x86)\ossec-agent\ossec.conf', + 'macos': '/Library/Ossec/etc/ossec.conf' +} + +vuln_packages = { + 'centos_7': 'https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm', + 'ubuntu_22': 'https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb' +} + + +vuln_packages_uninstall = { + 'centos_7': 'rclone', + 'ubuntu_22': 'rclone' +} + + +@pytest.fixture(scope='module') +def get_host_manager(request): + inventory_path = request.config.getoption('--inventory-path') + host_manager = HostManager(inventory_path) + + return host_manager, inventory_path + + +@pytest.fixture(scope='module') +def clean_environment(get_host_manager): + hm, inventory = get_host_manager + + for host in hm.get_group_hosts('manager'): + hm.modify_file_content(host, '/var/ossec/logs/alerts/alerts.json', '') + + +@pytest.fixture(scope='module') +def restart_environment(get_host_manager): + host_manager, inventory = get_host_manager + for host in host_manager.get_group_hosts('manager'): + 
host_manager.handle_wazuh_services(host, 'restart') + + for host in host_manager.get_group_hosts('agent'): + host_manager.handle_wazuh_services(host, 'restart') + + +@pytest.fixture(scope='module', autouse=False) +def configure_environment_manager(get_host_manager): + def configure_host(host): + host_variables = host.get_vars() + host_os = host_variables['os_name'] + configuration_file_path = configuration_filepath_os[host_os] + + + host_configuration = None + host_groups = [str(group) for group in host.get_groups()] + if 'manager' in host_groups: + host_configuration = configurations_paths['manager'] + elif 'agent' in host_groups: + host_configuration = configurations_paths['agent'] + + current_configuration = host_manager.get_file_content(str(host), configuration_file_path) + backup_configurations[host] = current_configuration + new_configuration_template = load_configuration_template(host_configuration, [{}], [{}]) + new_configuration = set_section_wazuh_conf(new_configuration_template[0].get('sections'), current_configuration.split("\n")) + new_configuration = [line for line in new_configuration if line.strip() != ""] + dom = xml.dom.minidom.parseString(''.join(new_configuration)) + new_configuration = "\n".join(dom.toprettyxml().split("\n")[1:]) + + host_manager.modify_file_content(str(host), configuration_file_path, new_configuration) + + backup_configurations = {} + + + host_manager, inventory = get_host_manager + + + loader = DataLoader() + inventory_manager = InventoryManager(loader=loader, sources=inventory) + all_hosts = inventory_manager.get_hosts() + + + with ThreadPool() as pool: + pool.map(configure_host, all_hosts) + + yield + + for host in all_hosts: + host_variables = host.get_vars() + host_os = host_variables['os_name'] + configuration_file_path = configuration_filepath_os[host_os] + + host_manager.modify_file_content(str(host), configuration_file_path, backup_configurations[host]) + +complete_list = [ (case['preconditions'], case['body'], 
case['teardown']) for case in cases] +list_ids = [ case['id'] for case in cases] + + + +def launch_remote_operation(host, operation, operation_data, hm, inventory): + host_os_name = hm.get_host_variables(host)['os'].split('_')[0] + if operation == 'install_package': + package_data = operation_data['package'] + package_url = package_data[host_os_name] + package_manager = packages_manager[host_os_name] + print(f"Install package {host} {package_url} {package_manager}") + hm.install_package(host, package_url, package_manager ) + elif operation == 'remove_package': + package_data = operation_data['package'] + package_name = package_data[host_os_name] + package_manager = packages_manager[host_os_name] + hm.remove_package(host, package_name, package_manager ) + elif operation == 'check_alerts': + # Check alert + temp_file = create_monitoring_file(hm.get_host_variables(host)['manager'], operation_data[host_os_name], '/var/ossec/logs/alerts/alerts.json', 120) + import pdb; pdb.set_trace() + HostMonitor(inventory_path=inventory, messages_path=temp_file.name, tmp_path='/tmp/').run() + temp_file.close() + + +def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager, inventory): + if task_list: + for task in task_list: + task_keys = list(task.keys()) + task_values = list(task.values()) + operation, operation_data = task_keys[0], task_values[0] + launch_remote_operation(agent, operation, operation_data, host_manager, inventory) + + +def launch_parallel_operations(task_list, host_manager, inventory, group='agent'): + agents = host_manager.get_group_hosts('agent') + parallel_configuration = [(agent, task_list, host_manager, inventory) for agent in agents] + with ThreadPool() as pool: + # Use the pool to map the function to the list of hosts + pool.starmap(launch_remote_sequential_operation_on_agent, parallel_configuration) + + +@pytest.fixture(scope='function') +def setup(preconditions, teardown, get_host_manager): + hm, inventory = get_host_manager + 
launch_parallel_operations(preconditions, hm, inventory) + + yield + + launch_parallel_operations(teardown, hm, inventory) + + +def create_monitoring_file(host, regex, monitoring_file, timeout): + monitoring_file_content = f"""\ +{host}: + - regex: "{regex}" + path: {monitoring_file} + timeout: {timeout} +""" + + temp_file = tempfile.NamedTemporaryFile() + + with open(temp_file.name, 'w+') as tmp: + # with tempfile.NamedTemporaryFile(delete=False) as tmp: + tmp.write(monitoring_file_content) + + return temp_file + + + +@pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) +def test_vulnerability_detector(configure_environment_manager, preconditions, body, teardown, setup, get_host_manager): + """ + """ + hm, inventory = get_host_manager + launch_parallel_operations(body, hm, inventory) diff --git a/tests/system/provisioning/environment_test_dictionary.json b/tests/system/provisioning/environment_test_dictionary.json index cfe55e5323..fd2a6c1482 100644 --- a/tests/system/provisioning/environment_test_dictionary.json +++ b/tests/system/provisioning/environment_test_dictionary.json @@ -65,5 +65,8 @@ "end_to_end/test_basic_cases/test_vulnerability_detector/test_vulnerability_detector_windows/test_vulnerability_detection_windows.py", "end_to_end/test_basic_cases/test_windows_defender/test_windows_defender.py", "end_to_end/test_basic_cases/test_yara_integration/test_yara_integration.py", + ], + "e2e_vulnerability_detector": [ + "end_to_end/test_vulnerability_detector/test_scans.py" ] } From 5b787f0ad8ec0df3635dc5d8829d03e769217a6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 6 Nov 2023 18:04:48 +0000 Subject: [PATCH 002/174] feat: create generate check alert function --- .../wazuh_testing/tools/system.py | 2 +- .../wazuh/wazuh-indexer/tasks/Debian.yml | 8 ++++++++ .../test_vulnerability_detector/cases.yaml | 5 +++-- .../test_vulnerability_detector/test_scans.py | 20 +++++++++++++++++-- 4 files 
changed, 30 insertions(+), 5 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index e699948110..2fde309fdd 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -102,7 +102,7 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied. """ self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0775", - check=check, become=True) + check=check) def add_block_to_file(self, host: str, path: str, replace: str, before: str, after, check: bool = False): """Add text block to desired file. diff --git a/provisioning/roles/wazuh/wazuh-indexer/tasks/Debian.yml b/provisioning/roles/wazuh/wazuh-indexer/tasks/Debian.yml index cf42582b2b..725b8d55f9 100644 --- a/provisioning/roles/wazuh/wazuh-indexer/tasks/Debian.yml +++ b/provisioning/roles/wazuh/wazuh-indexer/tasks/Debian.yml @@ -1,6 +1,14 @@ --- +- name: Install + apt: + update_cache: yes + register: update_result + until: update_result is success + retries: 3 + delay: 10 + - name: Update cache apt: update_cache: yes diff --git a/tests/end_to_end/test_vulnerability_detector/cases.yaml b/tests/end_to_end/test_vulnerability_detector/cases.yaml index cd8b6a1ffc..e2852df5e7 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases.yaml @@ -8,8 +8,9 @@ centos: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm ubuntu: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb - check_alerts: - centos: '.*installed.*.*agent\".*\"name\":\"\":\"rclone\",\"arch\":\"amd64\",\"version\":\"1.49.5\"' - ubuntu: '.*New dpkg \\(Debian Package\\) 
installed.*.*agent\".*\"name\":\".*package\":\"rclone\",\"arch\":\"amd64\",\"version\":\"1.49.5\"' + installed_package: + name: "rclone" + version: "1.49.5" teardown: - remove_package: package: diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index d8fbd0d0a5..ba3da36bde 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -53,6 +53,17 @@ + +def get_operation_regex(host, hm, name, version): + host_os_name = hm.get_host_variables(host)['os'].split('_')[0] + installed_package_alert = '' + if host_os_name == 'centos': + installed_package_alert = rf".*installed.*.*agent\".*\"name\":\"{host}\":\"{name}\",\"arch\":\"amd64\",\"version\":\"{version}" + elif host_os_name == 'ubuntu': + installed_package_alert = rf".*New dpkg \(Debian Package\) installed.*.*agent\".*\"name\":\"{host}.*package\":\"{name}\",\"arch\":\"amd64\",\"version\":\"{version}\"" + return installed_package_alert + + @pytest.fixture(scope='module') def get_host_manager(request): inventory_path = request.config.getoption('--inventory-path') @@ -133,6 +144,7 @@ def configure_host(host): def launch_remote_operation(host, operation, operation_data, hm, inventory): + print(f"Operation {operation} in {host} with {operation_data}") host_os_name = hm.get_host_variables(host)['os'].split('_')[0] if operation == 'install_package': package_data = operation_data['package'] @@ -147,8 +159,12 @@ def launch_remote_operation(host, operation, operation_data, hm, inventory): hm.remove_package(host, package_name, package_manager ) elif operation == 'check_alerts': # Check alert - temp_file = create_monitoring_file(hm.get_host_variables(host)['manager'], operation_data[host_os_name], '/var/ossec/logs/alerts/alerts.json', 120) - HostMonitor(inventory_path=inventory, messages_path=temp_file.name, tmp_path='/tmp/').run() + # Replacing placeholders + 
final_regex = get_operation_regex(host, hm, operation_data['installed_package']['name'], operation_data['installed_package']['version']) + temp_file = create_monitoring_file(hm.get_host_variables(host)['manager'], final_regex, '/var/ossec/logs/alerts/alerts.json', 120) + print(temp_file.name) + + HostMonitor(inventory_path=inventory, messages_path=temp_file.name, tmp_path='/tmp/example').run() temp_file.close() From e8992ccf957ddf28aa930239da8ec7b504559147 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 7 Nov 2023 18:59:42 +0000 Subject: [PATCH 003/174] feat: created test_vulnerability_detector basic tests structure --- .../wazuh_testing/tools/system.py | 3 + .../test_vulnerability_detector/cases.yaml | 203 +++++++++++++++++- .../test_vulnerability_detector/playbook.yaml | 9 - .../test_vulnerability_detector/test_scans.py | 109 +++++++--- 4 files changed, 275 insertions(+), 49 deletions(-) delete mode 100644 tests/end_to_end/test_vulnerability_detector/playbook.yaml diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 2fde309fdd..8d98ba4a2b 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -181,6 +181,9 @@ def get_file_content(self, host: str, file_path: str): # return self.get_host(host).file(file_path).content_string testinfra_host = self.get_host(host) result = testinfra_host.ansible("slurp", f"src='{file_path}'", check=False) + if 'content' not in result: + raise Exception(f"No content value in {result}") + return base64.b64decode(result['content']).decode('utf-8') diff --git a/tests/end_to_end/test_vulnerability_detector/cases.yaml b/tests/end_to_end/test_vulnerability_detector/cases.yaml index e2852df5e7..9c51cd0fb7 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases.yaml @@ -3,16 +3,205 @@ description: 
"Installation of a vulnerable package" preconditions: null body: - - install_package: + tasks: + - install_package: + package: + centos: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm + ubuntu: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb + check_alerts: + centos: + - event: install_package + name: "rclone" + version: "1.49.5" + ubuntu: + - event: install_package + name: "rclone" + version: "1.49.5" + teardown: + - remove_package: package: - centos: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm - ubuntu: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb - - check_alerts: - installed_package: - name: "rclone" - version: "1.49.5" + centos: rclone + ubuntu: rclone + +- case: "Updating a vulnerable package that remains vulnerable to the same CVE" + id: "update_vuln_package_remain_vulnerable" + description: "Updating a vulnerable package that remains vulnerable to the same CVE" + preconditions: + tasks: + - install_package: + package: + centos: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm + ubuntu: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb + check_alerts: + centos: + - event: install_package + name: "rclone" + version: "1.49.5" + ubuntu: + - event: install_package + name: "rclone" + version: "1.49.5" + body: + tasks: + - install_package: + package: + centos: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.rpm + ubuntu: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.deb + check_alerts: + centos: + - event: upgrade_package + name: "rclone" + version: "1.50.0" + ubuntu: + - event: upgrade_package + name: "rclone" + version: "1.50.0" teardown: - remove_package: package: centos: rclone ubuntu: rclone + +# ---- + +# - case: "Updating a vulnerable package that becomes vulnerable to another CVE" +# id: "update_vuln_package_additional_vulnerablility" +# description: "Updating a vulnerable package that becomes 
vulnerable to another CVE" +# preconditions: +# tasks: +# - install_package: +# package: +# centos: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm +# ubuntu: https://sourceforge.net/projects/webadmin/files/webmin/1.999/webmin_1.999_all.deb/download +# check_alerts: +# centos: +# - event: install_package +# name: "nmap" +# version: "6.47" +# ubuntu: +# - event: install_package +# name: "webmin" +# version: "1.999" +# body: +# tasks: +# - install_package: +# package: +# centos: https://nmap.org/dist/nmap-7.00-1.x86_64.rpm +# ubuntu: https://sourceforge.net/projects/webadmin/files/webmin/1.999/webmin_2.000_all.deb/download +# check_alerts: +# centos: +# - event: upgrade_package +# name: "nmap" +# version: "7.00" +# ubuntu: +# - event: upgrade_package +# name: "webmin" +# version: "2.000" +# teardown: +# - remove_package: +# package: +# centos: nmap +# ubuntu: webmin + + +# ------------------------ + +- case: "Vulnerable package that update is also vulnerable to another CVE" + id: "update_vuln_package_additional_vulnerablility" + description: "Vulnerable package that update is also vulnerable to another CVE" + preconditions: + tasks: + - install_package: + package: + centos: https://repo.mongodb.org/yum/redhat/7/mongodb-org/4.2/x86_64/RPMS/mongodb-org-server-4.2.11-1.el7.x86_64.rpm + ubuntu: https://repo.mongodb.org/apt/ubuntu/dists/bionic/mongodb-org/4.2/multiverse/binary-amd64/mongodb-org-server_4.2.11_amd64.deb + check_alerts: + centos: + - event: install_package + name: "mongodb" + version: "4.2.11" + ubuntu: + - event: install_package + name: "mongodb" + version: "4.2.11" + body: + tasks: + - install_package: + package: + centos: https://repo.mongodb.org/yum/redhat/7/mongodb-org/4.4/x86_64/RPMS/mongodb-org-server-4.4.10-1.el7.x86_64.rpm + ubuntu: https://repo.mongodb.org/apt/ubuntu/dists/focal/mongodb-org/4.4/multiverse/binary-amd64/mongodb-org-server_4.4.10_amd64.deb + check_alerts: + centos: + - event: upgrade_package + name: "mongodb" + version: "4.4.10" + 
ubuntu: + - event: upgrade_package + name: "mongodb" + version: "4.4.10" + teardown: + - remove_package: + package: + centos: mongodb + ubuntu: mongodb + + +- case: "Vulnerable package that update cease to be vulnerable" + id: "update_vuln_package_cease_vuln" + description: "Vulnerable package that update cease to be vulnerable" + preconditions: + tasks: + - install_package: + package: + centos: https://master.dl.sourceforge.net/project/webadmin/webmin/2.003/webmin-2.003-1.noarch.rpm + ubuntu: https://master.dl.sourceforge.net/project/webadmin/webmin/2.003/webmin_2.003_all.deb + check_alerts: + centos: + - event: install_package + name: "webmin" + version: "2.003" + ubuntu: + - event: install_package + name: "webmin" + version: "2.003" + body: + tasks: + - install_package: + package: + centos: https://kumisystems.dl.sourceforge.net/project/webadmin/webmin/2.000/webmin-2.000-1.noarch.rpm + ubuntu: https://altushost-swe.dl.sourceforge.net/project/webadmin/webmin/2.000/webmin_2.000_all.deb + check_alerts: + centos: + - event: upgrade_package + name: "webmin" + version: "2.000" + ubuntu: + - event: upgrade_package + name: "webmin" + version: "2.000" + teardown: + - remove_package: + package: + centos: webmin + ubuntu: webmin + + + + +# "Deleting a vulnerable package" + + + +# - case: "Installation of a non-vulnerable package" + + + +# - case: "Updating a non-vulnerable package that becomes vulnerable" + + + +# Updating of a non-vulnerable package that remains non-vulnerable + + + +# Deletion of a non-vulnerable package diff --git a/tests/end_to_end/test_vulnerability_detector/playbook.yaml b/tests/end_to_end/test_vulnerability_detector/playbook.yaml deleted file mode 100644 index a4296201b4..0000000000 --- a/tests/end_to_end/test_vulnerability_detector/playbook.yaml +++ /dev/null @@ -1,9 +0,0 @@ ---- -- name: Basic Windows Playbook - hosts: agent2 - tasks: - - name: Create a text file - win_copy: - src: /tmp/example.txt - dest: C:\textfile.txt - diff --git 
a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index ba3da36bde..d963f56894 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -46,22 +46,29 @@ 'macos': '/Library/Ossec/etc/ossec.conf' } -vuln_packages = { - 'centos_7': 'https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm', - 'ubuntu_22': 'https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb' -} - - -def get_operation_regex(host, hm, name, version): +def get_operation_regex(host, hm, event, operation_data): host_os_name = hm.get_host_variables(host)['os'].split('_')[0] - installed_package_alert = '' - if host_os_name == 'centos': - installed_package_alert = rf".*installed.*.*agent\".*\"name\":\"{host}\":\"{name}\",\"arch\":\"amd64\",\"version\":\"{version}" - elif host_os_name == 'ubuntu': - installed_package_alert = rf".*New dpkg \(Debian Package\) installed.*.*agent\".*\"name\":\"{host}.*package\":\"{name}\",\"arch\":\"amd64\",\"version\":\"{version}\"" - return installed_package_alert + alert_regex = '' + if event == 'install_package': + if host_os_name == 'centos': + alert_regex = rf".*installed.*agent\".*\"name\":\"{host}\".*Installed: {operation_data['name']}.*{operation_data['version']}" + elif host_os_name == 'ubuntu': + alert_regex = rf".*New dpkg \\(Debian Package\\) installed.*.*agent\".*\"name\":\"{host}.*package\":\"{operation_data['name']}\",\"arch\":\"amd64\",\"version\":\"{operation_data['version']}\"" + elif event == 'remove_package': + if host_os_name == 'centos': + alert_regex = rf".*package deleted.*agent\".*\"name\":\"{host}\".*Erased: {operation_data['name']}.*{operation_data['version']}" + elif host_os_name == 'ubuntu': + alert_regex = rf".*\\(Debian Package\\) removed.*.*agent\".*\"name\":\"{host}.*package\":\"{operation_data['name']}\",\"arch\":\"amd64\",\"version\":\"{operation_data['version']}\"" 
+ + elif event == 'update_package': + if host_os_name == 'centos': + alert_regex = rf".*package updated.*agent\".*\"name\":\"{host}\".*Updated: {operation_data['name']}.*{operation_data['version']}" + elif host_os_name == 'ubuntu': + alert_regex = rf".*New dpkg \\(Debian Package\\) installed.*.*agent\".*\"name\":\"{host}.*package\":\"{operation_data['name']}\",\"arch\":\"amd64\",\"version\":\"{operation_data['version']}\"" + + return alert_regex @pytest.fixture(scope='module') @@ -142,7 +149,6 @@ def configure_host(host): list_ids = [ case['id'] for case in cases] - def launch_remote_operation(host, operation, operation_data, hm, inventory): print(f"Operation {operation} in {host} with {operation_data}") host_os_name = hm.get_host_variables(host)['os'].split('_')[0] @@ -157,15 +163,6 @@ def launch_remote_operation(host, operation, operation_data, hm, inventory): package_name = package_data[host_os_name] package_manager = packages_manager[host_os_name] hm.remove_package(host, package_name, package_manager ) - elif operation == 'check_alerts': - # Check alert - # Replacing placeholders - final_regex = get_operation_regex(host, hm, operation_data['installed_package']['name'], operation_data['installed_package']['version']) - temp_file = create_monitoring_file(hm.get_host_variables(host)['manager'], final_regex, '/var/ossec/logs/alerts/alerts.json', 120) - print(temp_file.name) - - HostMonitor(inventory_path=inventory, messages_path=temp_file.name, tmp_path='/tmp/example').run() - temp_file.close() def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager, inventory): @@ -188,22 +185,58 @@ def launch_parallel_operations(task_list, host_manager, inventory, group='agent' @pytest.fixture(scope='function') def setup(preconditions, teardown, get_host_manager): hm, inventory = get_host_manager - launch_parallel_operations(preconditions, hm, inventory) + launch_parallel_operations(preconditions['tasks'], hm, inventory) + if 'check_alerts' in 
preconditions: + temp_file = create_monitoring_file(hm, preconditions['check_alerts'], '/var/ossec/logs/alerts/alerts.json', 120) + + local_path = os.path.dirname(os.path.abspath(__file__)) + tmp_path = os.path.join(local_path, 'tmp') + + HostMonitor(inventory_path=inventory, messages_path=temp_file.name, tmp_path=tmp_path).run() + temp_file.close() yield launch_parallel_operations(teardown, hm, inventory) + for host in hm.get_group_hosts('manager'): + hm.modify_file_content(host, path='/var/ossec/logs/alerts.json', content='') -def create_monitoring_file(host, regex, monitoring_file, timeout): - monitoring_file_content = f"""\ -{host}: - - regex: "{regex}" - path: {monitoring_file} - timeout: {timeout} -""" - temp_file = tempfile.NamedTemporaryFile() +def create_monitoring_file(hm, operation_data, monitoring_file, timeout, group='agent'): + monitoring_file_content = '' + monitoring_data = {} + available_events = ['install_package', 'remove_package', 'update_package'] + + for host in hm.get_group_hosts(group): + host_os_name = hm.get_host_variables(host)['os'].split('_')[0] + for event in operation_data['check_alerts'][host_os_name]: + event_type = event['event'] + + alerts_host = hm.get_host_variables(host)['manager'] + final_regex = get_operation_regex(host, hm, event, operation_data) + + monitoring = { + 'regex': f"{final_regex}", + 'path': f"{monitoring_file}", + 'timeout': f"{timeout}" + } + if alerts_host not in monitoring_data: + monitoring_data[alerts_host] = [] + + monitoring_data[alerts_host].append(monitoring) + + for host, data in monitoring_data.items(): + monitoring_file_content += f"""\ + {host}: +""" + for regex_monitoring in data: + monitoring_file_content += f"""\ + - regex: "{regex_monitoring['regex']}" + path: "{regex_monitoring['path']}" + timeout: {regex_monitoring['timeout']} +""" + temp_file = tempfile.NamedTemporaryFile() with open(temp_file.name, 'w+') as tmp: # with tempfile.NamedTemporaryFile(delete=False) as tmp: @@ -213,9 +246,19 @@ 
def create_monitoring_file(host, regex, monitoring_file, timeout): + @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) def test_vulnerability_detector(configure_environment_manager, preconditions, body, teardown, setup, get_host_manager): """ """ hm, inventory = get_host_manager - launch_parallel_operations(body, hm, inventory) + launch_parallel_operations(body['tasks'], hm, inventory) + + temp_file = create_monitoring_file(hm, body['check_alerts'], '/var/ossec/logs/alerts/alerts.json', 120) + + local_path = os.path.dirname(os.path.abspath(__file__)) + tmp_path = os.path.join(local_path, 'tmp') + + + HostMonitor(inventory_path=inventory, messages_path=temp_file.name, tmp_path=tmp_path).run() + temp_file.close() \ No newline at end of file From baddbd457e1e0cbe0c398d6ac34247ada42c5358 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 8 Nov 2023 18:53:32 +0000 Subject: [PATCH 004/174] refac: improve tests readability --- .../wazuh_testing/tools/system.py | 4 +- .../test_vulnerability_detector/cases.yaml | 212 ++++++++-------- .../data/regex.yaml | 34 +++ .../inventory.yaml | 186 ++++++++++++++ .../test_vulnerability_detector/test_scans.py | 234 ++++++++++++------ 5 files changed, 491 insertions(+), 179 deletions(-) create mode 100644 tests/end_to_end/test_vulnerability_detector/data/regex.yaml create mode 100644 tests/end_to_end/test_vulnerability_detector/inventory.yaml diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 8d98ba4a2b..985be570a6 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -104,6 +104,7 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0775", check=check) + def add_block_to_file(self, host: str, path: str, 
replace: str, before: str, after, check: bool = False): """Add text block to desired file. @@ -112,7 +113,7 @@ def add_block_to_file(self, host: str, path: str, replace: str, before: str, aft path (str): Path of the file replace (str): Text to be inserted in the file before (str): Lower stop of the block to be replaced - after (str): Upper stop of the block to be replaced + after (str): Upper stop of172.31.6.71 the block to be replaced check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied. Default `False`. """ @@ -132,6 +133,7 @@ def modify_file_content(self, host: str, path: str = None, content: Union[str, b tmp_file.write(content if isinstance(content, bytes) else content.encode()) tmp_file.seek(0) self.move_file(host, src_path=tmp_file.name, dest_path=path) + tmp_file.close() def control_service(self, host: str, service: str = 'wazuh', state: str = "started", check: bool = False): diff --git a/tests/end_to_end/test_vulnerability_detector/cases.yaml b/tests/end_to_end/test_vulnerability_detector/cases.yaml index 9c51cd0fb7..b1ac56fba3 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases.yaml @@ -6,17 +6,25 @@ tasks: - install_package: package: - centos: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm - ubuntu: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb + centos: + amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm + arm: null + ubuntu: + amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb + arm: null + windows: + amd64: http://sourceforge.net/projects/firebird/files/firebird-win32/2.0.7-Release/Firebird-2.0.7.13318_0_win32.exe/download check_alerts: centos: - - event: install_package - name: "rclone" - version: "1.49.5" + - event: syscollector_install_package_alert_yum + parameters: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.49.5" ubuntu: - - 
event: install_package - name: "rclone" - version: "1.49.5" + - event: syscollector_install_package_alert_apt + parameters: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.49.5" teardown: - remove_package: package: @@ -30,32 +38,40 @@ tasks: - install_package: package: - centos: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm - ubuntu: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb + centos: + amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm + ubuntu: + amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb check_alerts: centos: - - event: install_package - name: "rclone" - version: "1.49.5" + - event: syscollector_install_package_alert_yum + parameters: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.49.5" ubuntu: - - event: install_package - name: "rclone" - version: "1.49.5" + - event: syscollector_install_package_alert_apt + parameters: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.49.5" body: tasks: - install_package: package: - centos: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.rpm - ubuntu: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.deb + centos: + amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.rpm + ubuntu: + amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.deb check_alerts: centos: - event: upgrade_package - name: "rclone" - version: "1.50.0" + parameters: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.50.0" ubuntu: - event: upgrade_package - name: "rclone" - version: "1.50.0" + parameters: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.50.0" teardown: - remove_package: package: @@ -106,84 +122,84 @@ # ------------------------ -- case: "Vulnerable package that update is also vulnerable to another CVE" - id: "update_vuln_package_additional_vulnerablility" - description: "Vulnerable package that update is also vulnerable to another CVE" - preconditions: - tasks: - - install_package: - 
package: - centos: https://repo.mongodb.org/yum/redhat/7/mongodb-org/4.2/x86_64/RPMS/mongodb-org-server-4.2.11-1.el7.x86_64.rpm - ubuntu: https://repo.mongodb.org/apt/ubuntu/dists/bionic/mongodb-org/4.2/multiverse/binary-amd64/mongodb-org-server_4.2.11_amd64.deb - check_alerts: - centos: - - event: install_package - name: "mongodb" - version: "4.2.11" - ubuntu: - - event: install_package - name: "mongodb" - version: "4.2.11" - body: - tasks: - - install_package: - package: - centos: https://repo.mongodb.org/yum/redhat/7/mongodb-org/4.4/x86_64/RPMS/mongodb-org-server-4.4.10-1.el7.x86_64.rpm - ubuntu: https://repo.mongodb.org/apt/ubuntu/dists/focal/mongodb-org/4.4/multiverse/binary-amd64/mongodb-org-server_4.4.10_amd64.deb - check_alerts: - centos: - - event: upgrade_package - name: "mongodb" - version: "4.4.10" - ubuntu: - - event: upgrade_package - name: "mongodb" - version: "4.4.10" - teardown: - - remove_package: - package: - centos: mongodb - ubuntu: mongodb +# - case: "Vulnerable package that update is also vulnerable to another CVE" +# id: "update_vuln_package_additional_vulnerablility" +# description: "Vulnerable package that update is also vulnerable to another CVE" +# preconditions: +# tasks: +# - install_package: +# package: +# centos: https://repo.mongodb.org/yum/redhat/7/mongodb-org/4.2/x86_64/RPMS/mongodb-org-server-4.2.11-1.el7.x86_64.rpm +# ubuntu: https://repo.mongodb.org/apt/ubuntu/dists/bionic/mongodb-org/4.2/multiverse/binary-amd64/mongodb-org-server_4.2.11_amd64.deb +# check_alerts: +# centos: +# - event: install_package +# name: "mongodb" +# version: "4.2.11" +# ubuntu: +# - event: install_package +# name: "mongodb" +# version: "4.2.11" +# body: +# tasks: +# - install_package: +# package: +# centos: https://repo.mongodb.org/yum/redhat/7/mongodb-org/4.4/x86_64/RPMS/mongodb-org-server-4.4.10-1.el7.x86_64.rpm +# ubuntu: https://repo.mongodb.org/apt/ubuntu/dists/focal/mongodb-org/4.4/multiverse/binary-amd64/mongodb-org-server_4.4.10_amd64.deb +# 
check_alerts: +# centos: +# - event: upgrade_package +# name: "mongodb" +# version: "4.4.10" +# ubuntu: +# - event: upgrade_package +# name: "mongodb" +# version: "4.4.10" +# teardown: +# - remove_package: +# package: +# centos: mongodb +# ubuntu: mongodb -- case: "Vulnerable package that update cease to be vulnerable" - id: "update_vuln_package_cease_vuln" - description: "Vulnerable package that update cease to be vulnerable" - preconditions: - tasks: - - install_package: - package: - centos: https://master.dl.sourceforge.net/project/webadmin/webmin/2.003/webmin-2.003-1.noarch.rpm - ubuntu: https://master.dl.sourceforge.net/project/webadmin/webmin/2.003/webmin_2.003_all.deb - check_alerts: - centos: - - event: install_package - name: "webmin" - version: "2.003" - ubuntu: - - event: install_package - name: "webmin" - version: "2.003" - body: - tasks: - - install_package: - package: - centos: https://kumisystems.dl.sourceforge.net/project/webadmin/webmin/2.000/webmin-2.000-1.noarch.rpm - ubuntu: https://altushost-swe.dl.sourceforge.net/project/webadmin/webmin/2.000/webmin_2.000_all.deb - check_alerts: - centos: - - event: upgrade_package - name: "webmin" - version: "2.000" - ubuntu: - - event: upgrade_package - name: "webmin" - version: "2.000" - teardown: - - remove_package: - package: - centos: webmin - ubuntu: webmin +# - case: "Vulnerable package that update cease to be vulnerable" +# id: "update_vuln_package_cease_vuln" +# description: "Vulnerable package that update cease to be vulnerable" +# preconditions: +# tasks: +# - install_package: +# package: +# centos: https://master.dl.sourceforge.net/project/webadmin/webmin/2.003/webmin-2.003-1.noarch.rpm +# ubuntu: https://master.dl.sourceforge.net/project/webadmin/webmin/2.003/webmin_2.003_all.deb +# check_alerts: +# centos: +# - event: install_package +# name: "webmin" +# version: "2.003" +# ubuntu: +# - event: install_package +# name: "webmin" +# version: "2.003" +# body: +# tasks: +# - install_package: +# 
package: +# centos: https://kumisystems.dl.sourceforge.net/project/webadmin/webmin/2.000/webmin-2.000-1.noarch.rpm +# ubuntu: https://altushost-swe.dl.sourceforge.net/project/webadmin/webmin/2.000/webmin_2.000_all.deb +# check_alerts: +# centos: +# - event: upgrade_package +# name: "webmin" +# version: "2.000" +# ubuntu: +# - event: upgrade_package +# name: "webmin" +# version: "2.000" +# teardown: +# - remove_package: +# package: +# centos: webmin +# ubuntu: webmin diff --git a/tests/end_to_end/test_vulnerability_detector/data/regex.yaml b/tests/end_to_end/test_vulnerability_detector/data/regex.yaml new file mode 100644 index 0000000000..16b9546824 --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/data/regex.yaml @@ -0,0 +1,34 @@ +### Syscollector Events +syscollector_first_scan_start: + regex: ".*INFO: Starting evaluation." + +syscollector_first_scan_end: + regex: ".*INFO: Starting evaluation." + +syscollector_install_package_alert_yum: + regex: '.*installed.*agent".*"name":"HOST_NAME".*Installed: PACKAGE_NAME.*PACKAGE_VERSION' + parameters: ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] + +syscollector_install_package_alert_apt: + regex: '.*New dpkg \(Debian Package\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION"' + parameters: ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] + + +# syscollector_remove_package_alert_yum: +# regex: ".*installed.*agent".*"name":"HOST_NAME".*Installed: PACKAGE_NAME.*PACKAGE_VERSION" +# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION'] + +# syscollector_remove_package_alert_apt: +# regex: ".*New dpkg \\(Debian Package\\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION"" +# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION'] + + + + +# syscollector_upgrade_package_alert_yum: +# regex: ".*installed.*agent".*"name":"HOST_NAME".*Installed: PACKAGE_NAME.*PACKAGE_VERSION" +# variables: ['PACKAGE_NAME', 
'PACKAGE_VERSION'] + +# syscollector_upgrade_package_alert_apt: +# regex: ".*New dpkg \\(Debian Package\\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION"" +# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION'] diff --git a/tests/end_to_end/test_vulnerability_detector/inventory.yaml b/tests/end_to_end/test_vulnerability_detector/inventory.yaml new file mode 100644 index 0000000000..419d4933e1 --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/inventory.yaml @@ -0,0 +1,186 @@ +manager: + hosts: + manager1: + ip: 172.31.8.185 + version: 4.4.5 + revision: 1 + repository: live + type: master + os: ubuntu_22 + service: EC2 + resources: + cpu: 4 + memory: 8192 + ansible_host: 172.31.8.185 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + manager2: + ip: 172.31.7.16 + version: 4.4.5 + revision: 1 + repository: live + type: worker + os: ubuntu_22 + service: EC2 + resources: + cpu: 2 + memory: 4096 + ansible_host: 172.31.7.16 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + vars: {} +filebeat: + hosts: + manager1: + ip: 172.31.8.185 + os: ubuntu_22 + service: EC2 + resources: + cpu: 4 + memory: 8192 + ansible_host: 172.31.8.185 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + manager2: + ip: 172.31.7.16 + os: ubuntu_22 + service: EC2 + resources: + cpu: 2 + memory: 4096 + ansible_host: 172.31.7.16 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + vars: {} +indexer: + hosts: + manager1: + ip: 172.31.8.185 + version: 4.4.5 + revision: 1 + repository: live + os: ubuntu_22 + service: EC2 + resources: + cpu: 4 + memory: 8192 + 
ansible_host: 172.31.8.185 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + indexer_user: admin + indexer_password: changeme + vars: {} +qa_framework: + hosts: + manager1: + ip: 172.31.8.185 + qa_repository_reference: enhacement/4590-vd-basic-test-cases + os: ubuntu_22 + service: EC2 + resources: + cpu: 4 + memory: 8192 + ansible_host: 172.31.8.185 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + manager2: + ip: 172.31.7.16 + qa_repository_reference: enhacement/4590-vd-basic-test-cases + os: ubuntu_22 + service: EC2 + resources: + cpu: 2 + memory: 4096 + ansible_host: 172.31.7.16 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + ip-172-31-8-185: + ip: 172.31.15.144 + qa_repository_reference: enhacement/4590-vd-basic-test-cases + os: centos_7 + service: EC2 + ansible_host: 172.31.15.144 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.11 + ip-172-31-6-71: + ip: 172.31.6.71 + qa_repository_reference: enhacement/4590-vd-basic-test-cases + os: ubuntu_22 + service: EC2 + ansible_host: 172.31.6.71 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + vars: {} +agent: + children: + linux: + hosts: + ip-172-31-8-185: + ip: 172.31.15.144 + version: 4.4.5 + revision: 1 + repository: live + manager: manager1 + os: centos_7 + service: EC2 + ansible_host: 172.31.15.144 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.11 + manager_ip: 172.31.8.185 + arch: amd64 + ip-172-31-6-71: + ip: 172.31.6.71 + version: 4.4.5 + revision: 1 + 
repository: live + manager: manager1 + os: ubuntu_22 + service: EC2 + ansible_host: 172.31.6.71 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + manager_ip: 172.31.8.185 + arch: amd64 + macos: + hosts: {} + solaris: + hosts: {} + +all: + vars: + ansible_ssh_common_args: -o StrictHostKeyChecking=no + ansible_winrm_server_cert_validation: ignore + ansible_ssh_private_key_file: /home/rebits/.ssh/JenkinsEphemeral2.pem \ No newline at end of file diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index d963f56894..84003cc70f 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -24,12 +24,15 @@ current_dir = os.path.dirname(__file__) configurations_dir = os.path.join(current_dir, "data", "configurations") cases = {} +local_path = os.path.dirname(os.path.abspath(__file__)) +tmp_path = os.path.join(local_path, 'tmp') +regex_path = os.path.join(current_dir, 'data', 'regex.yaml') + with open(os.path.join(current_dir, 'cases.yaml'), 'r') as cases_file: cases = yaml.load(cases_file, Loader=yaml.FullLoader) - packages_manager = { 'centos': 'yum', 'ubuntu': 'apt' @@ -45,32 +48,30 @@ 'windows': 'C:\Program Files (x86)\ossec-agent\ossec.conf', 'macos': '/Library/Ossec/etc/ossec.conf' } +logs_filepath_os = { + 'linux': '/var/ossec/logs/ossec.log', + 'windows': 'C:\Program Files (x86)\ossec-agent\ossec.log', + 'macos': '/Library/Ossec/logs/ossec.log' +} +def get_event_regex(event, operation_data=None): + regexes = {} + with open(regex_path, 'r') as regex_file: + regexes = yaml.load(regex_file, Loader=yaml.FullLoader) -def get_operation_regex(host, hm, event, operation_data): - host_os_name = hm.get_host_variables(host)['os'].split('_')[0] - alert_regex = '' - if event == 'install_package': - if host_os_name == 'centos': - 
alert_regex = rf".*installed.*agent\".*\"name\":\"{host}\".*Installed: {operation_data['name']}.*{operation_data['version']}" - elif host_os_name == 'ubuntu': - alert_regex = rf".*New dpkg \\(Debian Package\\) installed.*.*agent\".*\"name\":\"{host}.*package\":\"{operation_data['name']}\",\"arch\":\"amd64\",\"version\":\"{operation_data['version']}\"" - elif event == 'remove_package': - if host_os_name == 'centos': - alert_regex = rf".*package deleted.*agent\".*\"name\":\"{host}\".*Erased: {operation_data['name']}.*{operation_data['version']}" - elif host_os_name == 'ubuntu': - alert_regex = rf".*\\(Debian Package\\) removed.*.*agent\".*\"name\":\"{host}.*package\":\"{operation_data['name']}\",\"arch\":\"amd64\",\"version\":\"{operation_data['version']}\"" - - elif event == 'update_package': - if host_os_name == 'centos': - alert_regex = rf".*package updated.*agent\".*\"name\":\"{host}\".*Updated: {operation_data['name']}.*{operation_data['version']}" - elif host_os_name == 'ubuntu': - alert_regex = rf".*New dpkg \\(Debian Package\\) installed.*.*agent\".*\"name\":\"{host}.*package\":\"{operation_data['name']}\",\"arch\":\"amd64\",\"version\":\"{operation_data['version']}\"" - - return alert_regex + expected_event = regexes[event['event']] + expected_regex = expected_event['regex'] + + if 'parameters' in expected_event and not 'parameters' in event: + raise Exception(f"Not provided enaugh data to create regex. 
Missing {event['PARAMETERS']}") + elif 'parameters' in event: + for parameter in expected_event['parameters']: + expected_regex = expected_regex.replace(parameter, event['parameters'][parameter]) + return expected_regex + @pytest.fixture(scope='module') def get_host_manager(request): inventory_path = request.config.getoption('--inventory-path') @@ -79,14 +80,6 @@ def get_host_manager(request): return host_manager, inventory_path -@pytest.fixture(scope='module') -def clean_environment(get_host_manager): - hm, inventory = get_host_manager - - for host in hm.get_group_hosts('manager'): - hm.modify_file_content(host, '/var/ossec/logs/alerts/alerts.json', '') - - @pytest.fixture(scope='module') def restart_environment(get_host_manager): host_manager, inventory = get_host_manager @@ -152,9 +145,10 @@ def configure_host(host): def launch_remote_operation(host, operation, operation_data, hm, inventory): print(f"Operation {operation} in {host} with {operation_data}") host_os_name = hm.get_host_variables(host)['os'].split('_')[0] + host_os_arch = hm.get_host_variables(host)['arch'] if operation == 'install_package': package_data = operation_data['package'] - package_url = package_data[host_os_name] + package_url = package_data[host_os_name][host_os_arch] package_manager = packages_manager[host_os_name] print(f"Install package {host} {package_url} {package_manager}") hm.install_package(host, package_url, package_manager ) @@ -185,80 +179,160 @@ def launch_parallel_operations(task_list, host_manager, inventory, group='agent' @pytest.fixture(scope='function') def setup(preconditions, teardown, get_host_manager): hm, inventory = get_host_manager - launch_parallel_operations(preconditions['tasks'], hm, inventory) - if 'check_alerts' in preconditions: - temp_file = create_monitoring_file(hm, preconditions['check_alerts'], '/var/ossec/logs/alerts/alerts.json', 120) - local_path = os.path.dirname(os.path.abspath(__file__)) - tmp_path = os.path.join(local_path, 'tmp') + if 
preconditions: + launch_parallel_operations(preconditions['tasks'], hm, inventory) + + if 'check_alerts' in preconditions: + monitoring_data = {} + + for agent in hm.get_group_hosts('agent'): + host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] + check_alerts_data = preconditions['check_alerts'][host_os_name] + for event in check_alerts_data: + if not hm.get_host_variables(agent)['manager'] in monitoring_data: + monitoring_data[hm.get_host_variables(agent)['manager']] = [] + check_alerts_data['parameters']['HOST_NAME'] = agent + + regex = get_event_regex(preconditions['check_alerts'][host_os_name]) + monitoring_data[hm.get_host_variables('manager')] = [{ + 'regex': regex, + 'path': '/var/ossec/logs/alerts/alerts.json', + 'timeout': 30 + }] + + + for agent in hm.get_group_hosts('agent'): + host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] + + for event in metadata_agent: + event['parameters']['HOST_NAME'] = agent + monitoring_element = { + 'regex': get_event_regex(event), + 'path': '/var/ossec/logs/alerts/alerts.json', + 'timeout': 120 + } + + if 'parameters' in metadata_agent: + monitoring_element['parameters'] = metadata_agent['parameters'] + + monitoring_data[hm.get_host_variables(agent)['manager']].append(monitoring_element) + + + + + - HostMonitor(inventory_path=inventory, messages_path=temp_file.name, tmp_path=tmp_path).run() - temp_file.close() + monitoring_events(get_host_manager, monitoring_data) yield - launch_parallel_operations(teardown, hm, inventory) + if teardown: + launch_parallel_operations(teardown, hm, inventory) - for host in hm.get_group_hosts('manager'): - hm.modify_file_content(host, path='/var/ossec/logs/alerts.json', content='') + # for host in hm.get_group_hosts('manager'): + # hm.modify_file_content(host, path='/var/ossec/logs/alerts/alerts.json', content='') -def create_monitoring_file(hm, operation_data, monitoring_file, timeout, group='agent'): - monitoring_file_content = '' - monitoring_data = {} - 
available_events = ['install_package', 'remove_package', 'update_package'] +def create_temp_file(content): + fd, temp_file_path = tempfile.mkstemp(text=True) # 'text=True' specifies text mode + with open(temp_file_path, 'w', newline='\n') as temp_file: + temp_file.write(content) + return temp_file_path - for host in hm.get_group_hosts(group): - host_os_name = hm.get_host_variables(host)['os'].split('_')[0] - for event in operation_data['check_alerts'][host_os_name]: - event_type = event['event'] - alerts_host = hm.get_host_variables(host)['manager'] - final_regex = get_operation_regex(host, hm, event, operation_data) +def monitoring_events(get_host_manager, monitoring_data): + hm, inventory = get_host_manager + monitoring_file_content = '' - monitoring = { - 'regex': f"{final_regex}", - 'path': f"{monitoring_file}", - 'timeout': f"{timeout}" - } - if alerts_host not in monitoring_data: - monitoring_data[alerts_host] = [] + for host, data in monitoring_data.items(): + monitoring_file_content += f"{host}:\n" + for monitoring_event in data: + monitoring_file_content += f" - regex: '{monitoring_event['regex']}'\n" + monitoring_file_content += f" path: '{monitoring_event['path']}'\n" + monitoring_file_content += f" timeout: {monitoring_event['timeout']}\n" - monitoring_data[alerts_host].append(monitoring) + temp_file = create_temp_file(monitoring_file_content) - for host, data in monitoring_data.items(): - monitoring_file_content += f"""\ - {host}: -""" - for regex_monitoring in data: - monitoring_file_content += f"""\ - - regex: "{regex_monitoring['regex']}" - path: "{regex_monitoring['path']}" - timeout: {regex_monitoring['timeout']} -""" - temp_file = tempfile.NamedTemporaryFile() + HostMonitor(inventory_path=inventory, messages_path=temp_file, tmp_path=tmp_path).run() - with open(temp_file.name, 'w+') as tmp: - # with tempfile.NamedTemporaryFile(delete=False) as tmp: - tmp.write(monitoring_file_content) + os.remove(temp_file) - return temp_file 
+@pytest.mark.dependency() +def test_syscollector_first_scan(get_host_manager): + """ + """ + hm, inventory = get_host_manager + regex_info = { + 'event': 'syscollector_first_scan_start' + } + + monitoring_data = {} + regex = get_event_regex(regex_info) + for agent in hm.get_group_hosts('agent'): + host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] + monitoring_data[agent] = [{ + 'regex': regex, + 'path': logs_filepath_os['linux'], + 'timeout': 120 + }] + monitoring_events(get_host_manager, monitoring_data) +@pytest.mark.dependency(depends=["test_syscollector_first_scan"]) +def test_syscollector_second_scan(get_host_manager): + """ + """ + hm, inventory = get_host_manager + monitoring_data = {} + regex_info = { + 'event': 'syscollector_first_scan_start' + } + regex = get_event_regex(regex_info) + for agent in hm.get_group_hosts('agent'): + host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] + monitoring_data[agent] = [{ + 'regex': regex, + 'path': logs_filepath_os['linux'], + 'timeout': 120 + }] + + monitoring_events(get_host_manager, monitoring_data) + + +# @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) -def test_vulnerability_detector(configure_environment_manager, preconditions, body, teardown, setup, get_host_manager): +def test_vulnerability_detector_scans(preconditions, body, teardown, setup, get_host_manager): """ """ hm, inventory = get_host_manager launch_parallel_operations(body['tasks'], hm, inventory) + metadata = body['check_alerts'] + monitoring_data = {} + for agent in hm.get_group_hosts('agent'): + host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] + metadata_agent = metadata[host_os_name] + if not hm.get_host_variables(agent)['manager'] in monitoring_data: + monitoring_data[hm.get_host_variables(agent)['manager']] = [] + + for event in metadata_agent: + event['parameters']['HOST_NAME'] = agent + 
monitoring_element = { + 'regex': get_event_regex(event), + 'path': '/var/ossec/logs/alerts/alerts.json', + 'timeout': 120 + } + + if 'parameters' in metadata_agent: + monitoring_element['parameters'] = metadata_agent['parameters'] + + monitoring_data[hm.get_host_variables(agent)['manager']].append(monitoring_element) + - temp_file = create_monitoring_file(hm, body['check_alerts'], '/var/ossec/logs/alerts/alerts.json', 120) + print(monitoring_data) - local_path = os.path.dirname(os.path.abspath(__file__)) - tmp_path = os.path.join(local_path, 'tmp') - HostMonitor(inventory_path=inventory, messages_path=temp_file.name, tmp_path=tmp_path).run() - temp_file.close() \ No newline at end of file + monitoring_events(get_host_manager, monitoring_data) From 21176da539f42e496371f8142616e4dcff50ca7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 14 Nov 2023 17:44:45 +0000 Subject: [PATCH 005/174] feat: include extra steps to Vulns verifications E2E --- .../wazuh_testing/tools/monitoring.py | 7 +- .../wazuh_testing/tools/system.py | 65 ++- .../ansible-wazuh-agent/defaults/main.yml | 1 + .../ansible-wazuh-agent/handlers/main.yml | 3 + .../wazuh/ansible-wazuh-agent/tasks/MacOS.yml | 12 + .../var-ossec-etc-ossec-agent.conf.j2 | 2 +- .../test_vulnerability_detector/cases.yaml | 174 ++++-- .../data/configurations/manager.yaml | 46 +- .../data/regex.yaml | 26 +- .../inventory.yaml | 186 ------- .../test_vulnerability_detector/test_scans.py | 502 ++++++++++++------ 11 files changed, 544 insertions(+), 480 deletions(-) delete mode 100644 tests/end_to_end/test_vulnerability_detector/inventory.yaml diff --git a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py index 87e0472c3f..5a4de5086a 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py @@ -916,7 +916,12 @@ def run(self, update_position=False): if 
len(monitored_files) == 0: raise AttributeError('There is no path to monitor. Exiting...') for path in monitored_files: - output_path = f'{host}_{path.split("/")[-1]}.tmp' + if '\\' in path: + first_path_element = path.split("\\")[-1] + else: + first_path_element = path.split("/")[-1] + + output_path = f'{host}_{first_path_element}.tmp' self._file_content_collectors.append(self.file_composer(host=host, path=path, output_path=output_path)) logger.debug(f'Add new file composer process for {host} and path: {path}') self._file_monitors.append(self._start(host=host, diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 985be570a6..f6e0fab106 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -53,8 +53,15 @@ def get_inventory(self) -> dict: """ return self.inventory + def get_inventory_path(self) -> str: + """Get the loaded Ansible inventory. + + Returns: + self.inventory: Ansible inventory + """ + return self.inventory_path - def get_group_hosts(self, pattern=None): + def get_group_hosts(self, pattern='None'): """Get all hosts from inventory that belong to a group. Args: @@ -68,6 +75,13 @@ def get_group_hosts(self, pattern=None): else: return [str(host) for host in self.inventory_manager.get_hosts()] + + def get_host_groups(self, host): + """ + """ + group_list = self.inventory_manager.get_host(host).get_groups() + return [str(group) for group in group_list] + def get_host_variables(self, host): """Get the variables of the specified host. 
@@ -92,6 +106,10 @@ def get_host(self, host: str): """ return testinfra.get_host(f"ansible://{host}?ansible_inventory={self.inventory_path}") + def truncate_file(self, host: str, filepath: str): + self.get_host(host).ansible("command", f"truncate -s 0 {filepath}", check=False) + + def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/ossec.conf', check: bool = False): """Move from src_path to the desired location dest_path for the specified host. @@ -101,9 +119,18 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o dest_path (str): Destination path check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied. """ - self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0775", - check=check) - + system = 'linux' + if 'os_name' in self.get_host_variables(host): + host_os_name = self.get_host_variables(host)['os_name'] + if host_os_name == 'windows': + system = 'windows' + + if system == 'linux': + a = self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0644", + check=check) + print(a) + else: + self.get_host(host).ansible("ansible.windows.win_copy", f"src='{src_path}' dest='{dest_path}'", check=check) def add_block_to_file(self, host: str, path: str, replace: str, before: str, after, check: bool = False): """Add text block to desired file. 
@@ -400,24 +427,30 @@ def download_file(self, host, url, dest_path, mode='755'): a = self.get_host(host).ansible("get_url", f"url={url} dest={dest_path} mode={mode}", check=False) return a - def install_package(self, host, url, package_manager): + def install_package(self, host, url, system='ubuntu'): result = False - if package_manager == 'apt': + if system =='windows': + a = self.get_host(host).ansible("win_package", f"path={url} arguments=/S", check=False) + print(a) + elif system == 'ubuntu': a = self.get_host(host).ansible("apt", f"deb={url}", check=False) if a['changed'] == True and a['stderr'] == '': result = True - elif package_manager == 'yum': + elif system == 'centos': a = self.get_host(host).ansible("yum", f"name={url} state=present sslverify=false disable_gpg_check=True", check=False) if 'rc' in a and a['rc'] == 0 and a['changed'] == True: result = True - def remove_package(self, host, package_name, package_manager): + + def remove_package(self, host, package_name, system): result = False - if package_manager == 'apt': + if system == 'windows': + a = self.get_host(host).ansible("win_package", f"path={package_name} state=absent arguments=/S", check=False) + elif system == 'ubuntu': a = self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) if a['changed'] == True and a['stderr'] == '': result = True - elif package_manager == 'yum': + elif system == 'centos': a = self.get_host(host).ansible("yum", f"name={package_name} state=absent", check=False) if 'rc' in a and a['rc'] == 0 and a['changed'] == True: result = True @@ -427,13 +460,21 @@ def handle_wazuh_services(self, host, operation): os = self.get_host_variables(host)['os_name'] binary_path = None if os == 'windows': - self.get_host(host).ansible('ansible.windows.win_command', f"cmd=NET {operation} Wazuh", check=False) + if operation == 'restart': + a = self.get_host(host).ansible('ansible.windows.win_shell', f'NET stop Wazuh', check=False) + b = 
self.get_host(host).ansible('ansible.windows.win_shell', f'NET start Wazuh', check=False) + + print(a) + print(b) + else: + a = self.get_host(host).ansible('ansible.windows.win_shell', f'NET {operation} Wazuh', check=False) + print(a) else: if os == 'linux': binary_path = f"/var/ossec/bin/wazuh-control" elif os == 'macos': binary_path = f"/Library/Ossec/bin/wazuh-control" - self.get_host(host).ansible('ansible.builtin.command', f'cmd="{binary_path} {operation}"', check=False) + self.get_host(host).ansible('shell', f"{binary_path} {operation}", check=False) def clean_environment(host_manager, target_files): diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/defaults/main.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/defaults/main.yml index 76e45a0888..9dbe620718 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/defaults/main.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/defaults/main.yml @@ -60,6 +60,7 @@ wazuh_winagent_config_url: https://packages.wazuh.com/4.x/windows/wazuh-agent-4. 
wazuh_winagent_package_name: wazuh-agent-4.8.0-1.msi wazuh_winagent_package_name_generic: wazuh-agent.msi wazuh_dir: "/var/ossec" +wazuh_macos_dir: "/Library/Ossec" # This is deprecated, see: wazuh_agent_address wazuh_agent_nat: false diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml index 84f3ff4553..56d5a281eb 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml @@ -4,3 +4,6 @@ - name: Windows | Restart Wazuh Agent win_service: name=WazuhSvc start_mode=auto state=restarted + +- name: MacOS | restart wazuh-agent + ansible.builtin.shell: "{{ wazuh_macos_dir }}/bin/wazuh-control restart" \ No newline at end of file diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml index 78f28e8305..fb19fbd320 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml @@ -6,3 +6,15 @@ - include_tasks: "installation_from_custom_packages.yml" when: wazuh_custom_packages_installation_agent_enabled + +- name: MacOS | Installing agent configuration (ossec.conf) + template: + src: var-ossec-etc-ossec-agent.conf.j2 + dest: "{{ wazuh_macos_dir }}/etc/ossec.conf" + owner: root + group: wazuh + mode: 0644 + notify: restart wazuh-agent + tags: + - init + - config diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 index 8eef3d1dae..165d8e64b1 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 @@ -44,7 +44,7 @@ {{ wazuh_agent_config.enrollment.manager_address }} {% 
endif %} {% if wazuh_agent_config.enrollment.agent_name | length > 0 %} - {{ wazuh_agent_config.enrollment.agent_name }} + {{ ansible_hostname }} {% endif %} {% if wazuh_agent_config.enrollment.port is defined > 0 %} {{ wazuh_agent_config.enrollment.port }} diff --git a/tests/end_to_end/test_vulnerability_detector/cases.yaml b/tests/end_to_end/test_vulnerability_detector/cases.yaml index b1ac56fba3..2f548d2edf 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases.yaml @@ -13,70 +13,144 @@ amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb arm: null windows: - amd64: http://sourceforge.net/projects/firebird/files/firebird-win32/2.0.7-Release/Firebird-2.0.7.13318_0_win32.exe/download - check_alerts: + amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe + macos: + amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg + +# {"timestamp":"2023-11-14T10:52:41.932+0000","rule":{"level":10,"description":"CVE-2020-28924 affects rclone","id":"23505","firedtimes":392,"mail":false,"groups":["vulnerability-detector"],"gdpr":["IV_35.7.d"],"pci_dss":["11.2.1","11.2.3"],"tsc":["CC7.1","CC7.2"]},"agent":{"id":"002","name":"ip-172-31-12-122.ec2.internal","ip":"172.31.12.122"},"manager":{"name":"ip-172-31-6-24"},"id":"1699959161.19604197","cluster":{"name":"wazuh","node":"master"},"decoder":{"name":"json"},"data":{"vulnerability":{"package":{"name":"rclone","version":"1.49.5-1","architecture":"x86_64","condition":"Package less than 
1.53.3"},"cvss":{"cvss2":{"vector":{"attack_vector":"network","access_complexity":"low","authentication":"none","confidentiality_impact":"partial","integrity_impact":"none","availability":"none"},"base_score":"5","exploitability_score":"10","impact_score":"2.900000"},"cvss3":{"vector":{"attack_vector":"network","access_complexity":"low","privileges_required":"none","user_interaction":"none","scope":"unchanged","confidentiality_impact":"high","integrity_impact":"none","availability":"none"},"base_score":"7.500000","exploitability_score":"3.900000","impact_score":"3.600000"}},"cve":"CVE-2020-28924","title":"CVE-2020-28924 affects rclone","rationale":"An issue was discovered in Rclone before 1.53.3. Due to the use of a weak random number generator, the password generator has been producing weak passwords with much less entropy than advertised. The suggested passwords depend deterministically on the time the second rclone was started. This limits the entropy of the passwords enormously. These passwords are often used in the crypt backend for encryption of data. It would be possible to make a dictionary of all possible passwords with about 38 million entries per password length. This would make decryption of secret material possible with a plausible amount of effort. 
NOTE: all passwords generated by affected versions should be changed.","severity":"High","published":"2020-11-19","updated":"2022-04-26","cwe_reference":"CWE-331","status":"Active","type":"PACKAGE","references":["https://github.com/rclone/rclone/issues/4783","https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/UJIFT24Q6EFXLQZ24AER2QGFFZLMIPCD/","https://rclone.org/downloads/","https://security.gentoo.org/glsa/202107-14","https://nvd.nist.gov/vuln/detail/CVE-2020-28924"],"assigner":"cve@mitre.org"}},"location":"vulnerability-detector"} + + check_agent_alert_indexer: centos: - - event: syscollector_install_package_alert_yum - parameters: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.49.5" - ubuntu: - - event: syscollector_install_package_alert_apt - parameters: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.49.5" - teardown: - - remove_package: - package: - centos: rclone - ubuntu: rclone - -- case: "Updating a vulnerable package that remains vulnerable to the same CVE" - id: "update_vuln_package_remain_vulnerable" - description: "Updating a vulnerable package that remains vulnerable to the same CVE" - preconditions: - tasks: - - install_package: - package: - centos: - amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm - ubuntu: - amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb - check_alerts: - centos: + amd64: - event: syscollector_install_package_alert_yum parameters: PACKAGE_NAME: "rclone" PACKAGE_VERSION: "1.49.5" - ubuntu: - - event: syscollector_install_package_alert_apt + arm: + - event: syscollector_install_package_alert_yum parameters: PACKAGE_NAME: "rclone" PACKAGE_VERSION: "1.49.5" - body: - tasks: - - install_package: - package: - centos: - amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.rpm - ubuntu: - amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.deb - check_alerts: - centos: - - event: upgrade_package - 
parameters: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.50.0" ubuntu: - - event: upgrade_package + amd64: + - event: syscollector_install_package_alert_apt + parameters: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.49.5" + arm: + - event: syscollector_install_package_alert_apt parameters: PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.50.0" + PACKAGE_VERSION: "1.49.5" + windows: + amd64: + - event: syscollector_install_package_alert_apt + parameters: + PACKAGE_NAME: "VideoLAN" + PACKAGE_VERSION: "3.0.6" + macos: + amd64: + - event: syscollector_install_package_alert_apt + parameters: + PACKAGE_NAME: + PACKAGE_VERSION: + + + # check_alerts: + # centos: + # amd64: + # - event: syscollector_install_package_alert_yum + # parameters: + # PACKAGE_NAME: "rclone" + # PACKAGE_VERSION: "1.49.5" + # arm: + # - event: syscollector_install_package_alert_yum + # parameters: + # PACKAGE_NAME: "rclone" + # PACKAGE_VERSION: "1.49.5" + # ubuntu: + # amd64: + # - event: syscollector_install_package_alert_apt + # parameters: + # PACKAGE_NAME: "rclone" + # PACKAGE_VERSION: "1.49.5" + # arm: + # - event: syscollector_install_package_alert_apt + # parameters: + # PACKAGE_NAME: "rclone" + # PACKAGE_VERSION: "1.49.5" + # windows: + # amd64: + # - event: syscollector_install_package_alert_apt + # parameters: + # PACKAGE_NAME: "VideoLAN" + # PACKAGE_VERSION: "3.0.6" + # macos: + # amd64: + # - event: syscollector_install_package_alert_apt + # parameters: + # PACKAGE_NAME: + # PACKAGE_VERSION: teardown: - remove_package: package: - centos: rclone - ubuntu: rclone + centos: + amd: rclone + arm: rclone + ubuntu: + amd: rclone + arm: rclone + windows: + amd64: C:\\\\Program Files\\\\VideoLAN\\\\VLC\\\\uninstall.exe + macos: + amd64: + +# - case: "Updating a vulnerable package that remains vulnerable to the same CVE" +# id: "update_vuln_package_remain_vulnerable" +# description: "Updating a vulnerable package that remains vulnerable to the same CVE" +# preconditions: +# tasks: +# - 
install_package: +# package: +# centos: +# amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm +# ubuntu: +# amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb +# check_alerts: +# centos: +# - event: syscollector_install_package_alert_yum +# parameters: +# PACKAGE_NAME: "rclone" +# PACKAGE_VERSION: "1.49.5" +# ubuntu: +# - event: syscollector_install_package_alert_apt +# parameters: +# PACKAGE_NAME: "rclone" +# PACKAGE_VERSION: "1.49.5" +# body: +# tasks: +# - install_package: +# package: +# centos: +# amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.rpm +# ubuntu: +# amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.deb +# check_alerts: +# centos: +# - event: syscollector_upgrade_package_alert_yum +# parameters: +# PACKAGE_NAME: "rclone" +# PACKAGE_VERSION: "1.50.0" +# ubuntu: +# - event: syscollector_install_package_alert_apt +# parameters: +# PACKAGE_NAME: "rclone" +# PACKAGE_VERSION: "1.50.0" +# teardown: +# - remove_package: +# package: +# centos: rclone +# ubuntu: rclone # ---- diff --git a/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml index 27a2b3bbb9..e3c38cdb8d 100644 --- a/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml +++ b/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml @@ -1,60 +1,40 @@ - sections: - section: vulnerability-detector elements: + - interval: + value: 5m - enabled: value: 'yes' - run_on_start: value: 'yes' - # - provider: - # attributes: - # - name: 'redhat' - # elements: - # - enabled: - # value: 'yes' - # - os: - # attributes: - # - path: CUSTOM_REDHAT_OVAL_FEED - # value: OS - # - path: - # value: CUSTOM_REDHAT_JSON_FEED - provider: attributes: - - name: 'canonical' + - name: 'redhat' elements: - enabled: - value: 'no' + value: 'yes' + - os: + value: 7 - provider: attributes: - 
- name: 'debian' + - name: 'canonical' elements: - enabled: - value: 'no' + value: 'yes' + - os: + value: jammy - provider: attributes: - name: 'msu' elements: - enabled: - value: 'no' - - provider: - attributes: - - name: 'alas' - elements: - - enabled: - value: 'no' + value: 'yes' - provider: attributes: - - name: 'arch' + - name: 'nvd' elements: - enabled: - value: 'no' - # - provider: - # attributes: - # - name: 'nvd' - # elements: - # - enabled: - # value: 'yes' - # - path: - # value: CUSTOM_NVD_JSON_FEED + value: 'yes' - section: sca elements: diff --git a/tests/end_to_end/test_vulnerability_detector/data/regex.yaml b/tests/end_to_end/test_vulnerability_detector/data/regex.yaml index 16b9546824..fdce5052d2 100644 --- a/tests/end_to_end/test_vulnerability_detector/data/regex.yaml +++ b/tests/end_to_end/test_vulnerability_detector/data/regex.yaml @@ -1,8 +1,8 @@ ### Syscollector Events -syscollector_first_scan_start: +syscollector_scan_start: regex: ".*INFO: Starting evaluation." -syscollector_first_scan_end: +syscollector_scan_end: regex: ".*INFO: Starting evaluation." 
syscollector_install_package_alert_yum: @@ -13,22 +13,6 @@ syscollector_install_package_alert_apt: regex: '.*New dpkg \(Debian Package\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION"' parameters: ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] - -# syscollector_remove_package_alert_yum: -# regex: ".*installed.*agent".*"name":"HOST_NAME".*Installed: PACKAGE_NAME.*PACKAGE_VERSION" -# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION'] - -# syscollector_remove_package_alert_apt: -# regex: ".*New dpkg \\(Debian Package\\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION"" -# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION'] - - - - -# syscollector_upgrade_package_alert_yum: -# regex: ".*installed.*agent".*"name":"HOST_NAME".*Installed: PACKAGE_NAME.*PACKAGE_VERSION" -# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION'] - -# syscollector_upgrade_package_alert_apt: -# regex: ".*New dpkg \\(Debian Package\\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION"" -# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION'] +syscollector_upgrade_package_alert_yum: + regex: '.*Yum package updated.*agent".*"name":"HOST_NAME".*Updated: PACKAGE_NAME.*PACKAGE_VERSION' + parameters: ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] diff --git a/tests/end_to_end/test_vulnerability_detector/inventory.yaml b/tests/end_to_end/test_vulnerability_detector/inventory.yaml deleted file mode 100644 index 419d4933e1..0000000000 --- a/tests/end_to_end/test_vulnerability_detector/inventory.yaml +++ /dev/null @@ -1,186 +0,0 @@ -manager: - hosts: - manager1: - ip: 172.31.8.185 - version: 4.4.5 - revision: 1 - repository: live - type: master - os: ubuntu_22 - service: EC2 - resources: - cpu: 4 - memory: 8192 - ansible_host: 172.31.8.185 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - 
ansible_python_interpreter: /usr/local/bin/python3.10 - manager2: - ip: 172.31.7.16 - version: 4.4.5 - revision: 1 - repository: live - type: worker - os: ubuntu_22 - service: EC2 - resources: - cpu: 2 - memory: 4096 - ansible_host: 172.31.7.16 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - vars: {} -filebeat: - hosts: - manager1: - ip: 172.31.8.185 - os: ubuntu_22 - service: EC2 - resources: - cpu: 4 - memory: 8192 - ansible_host: 172.31.8.185 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - manager2: - ip: 172.31.7.16 - os: ubuntu_22 - service: EC2 - resources: - cpu: 2 - memory: 4096 - ansible_host: 172.31.7.16 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - vars: {} -indexer: - hosts: - manager1: - ip: 172.31.8.185 - version: 4.4.5 - revision: 1 - repository: live - os: ubuntu_22 - service: EC2 - resources: - cpu: 4 - memory: 8192 - ansible_host: 172.31.8.185 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - indexer_user: admin - indexer_password: changeme - vars: {} -qa_framework: - hosts: - manager1: - ip: 172.31.8.185 - qa_repository_reference: enhacement/4590-vd-basic-test-cases - os: ubuntu_22 - service: EC2 - resources: - cpu: 4 - memory: 8192 - ansible_host: 172.31.8.185 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - manager2: - ip: 172.31.7.16 - qa_repository_reference: enhacement/4590-vd-basic-test-cases - os: ubuntu_22 - service: EC2 - resources: - cpu: 2 - memory: 4096 - ansible_host: 172.31.7.16 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 
'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - ip-172-31-8-185: - ip: 172.31.15.144 - qa_repository_reference: enhacement/4590-vd-basic-test-cases - os: centos_7 - service: EC2 - ansible_host: 172.31.15.144 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.11 - ip-172-31-6-71: - ip: 172.31.6.71 - qa_repository_reference: enhacement/4590-vd-basic-test-cases - os: ubuntu_22 - service: EC2 - ansible_host: 172.31.6.71 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - vars: {} -agent: - children: - linux: - hosts: - ip-172-31-8-185: - ip: 172.31.15.144 - version: 4.4.5 - revision: 1 - repository: live - manager: manager1 - os: centos_7 - service: EC2 - ansible_host: 172.31.15.144 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.11 - manager_ip: 172.31.8.185 - arch: amd64 - ip-172-31-6-71: - ip: 172.31.6.71 - version: 4.4.5 - revision: 1 - repository: live - manager: manager1 - os: ubuntu_22 - service: EC2 - ansible_host: 172.31.6.71 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - manager_ip: 172.31.8.185 - arch: amd64 - macos: - hosts: {} - solaris: - hosts: {} - -all: - vars: - ansible_ssh_common_args: -o StrictHostKeyChecking=no - ansible_winrm_server_cert_validation: ignore - ansible_ssh_private_key_file: /home/rebits/.ssh/JenkinsEphemeral2.pem \ No newline at end of file diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index 84003cc70f..1b32481e5b 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -1,25 +1,25 @@ 
-import pytest -import os -import pytest -import os -import subprocess import argparse -import ansible_runner import base64 +import os import re +import subprocess +import tempfile from multiprocessing.pool import ThreadPool -from wazuh_testing.tools.configuration import ( - load_configuration_template, set_section_wazuh_conf -) + +import pytest +import ansible_runner import xml.dom.minidom import yaml -import tempfile - -from wazuh_testing.tools.system import HostManager from ansible.inventory.manager import InventoryManager from ansible.parsing.dataloader import DataLoader -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.configuration import ( + load_configuration_template, set_section_wazuh_conf +) +from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system import HostManager +from wazuh_testing.api import make_api_call, get_token_login_api +from wazuh_testing.end_to_end import get_alert_indexer_api current_dir = os.path.dirname(__file__) configurations_dir = os.path.join(current_dir, "data", "configurations") @@ -27,17 +27,12 @@ local_path = os.path.dirname(os.path.abspath(__file__)) tmp_path = os.path.join(local_path, 'tmp') regex_path = os.path.join(current_dir, 'data', 'regex.yaml') +STATE_INDEX_NAME = 'agents_state_index' with open(os.path.join(current_dir, 'cases.yaml'), 'r') as cases_file: cases = yaml.load(cases_file, Loader=yaml.FullLoader) - -packages_manager = { - 'centos': 'yum', - 'ubuntu': 'apt' -} - configurations_paths = { 'manager': os.path.join(configurations_dir, 'manager.yaml'), 'agent': os.path.join(configurations_dir, 'agent.yaml') @@ -55,183 +50,236 @@ } -def get_event_regex(event, operation_data=None): - regexes = {} - with open(regex_path, 'r') as regex_file: - regexes = yaml.load(regex_file, Loader=yaml.FullLoader) - - expected_event = regexes[event['event']] - expected_regex = expected_event['regex'] - if 'parameters' in expected_event and not 'parameters' in event: - raise 
Exception(f"Not provided enaugh data to create regex. Missing {event['PARAMETERS']}") - elif 'parameters' in event: - for parameter in expected_event['parameters']: - expected_regex = expected_regex.replace(parameter, event['parameters'][parameter]) +complete_list = [ (case['preconditions'], case['body'], case['teardown']) for case in cases] +dependencies = [None if 'depends' not in case else pytest.mark.depends(name=case['depend']) for case in cases] +list_ids = [ case['id'] for case in cases] - return expected_regex @pytest.fixture(scope='module') -def get_host_manager(request): - inventory_path = request.config.getoption('--inventory-path') - host_manager = HostManager(inventory_path) +def setup_vulnerability_tests(host_manager): + # Configure managers and agents + hosts_configuration_backup = backup_configurations(host_manager) + configure_environment_manager(host_manager, load_vulnerability_detector_configurations()) - return host_manager, inventory_path + # Restart managers and stop agents + control_environment(host_manager, 'stop', ['agent']) + control_environment(host_manager, 'restart', ['manager']) + # Wait until VD is updated + wait_until_vd_is_updated(host_manager) -@pytest.fixture(scope='module') -def restart_environment(get_host_manager): - host_manager, inventory = get_host_manager - for host in host_manager.get_group_hosts('manager'): - host_manager.handle_wazuh_services(host, 'restart') + # Truncate alerts and logs of managers and agents + truncate_logs(host_manager) + + # Start agents + control_environment(host_manager, 'start', ['agent']) - for host in host_manager.get_group_hosts('agent'): - host_manager.handle_wazuh_services(host, 'restart') + yield + restore_backup(host_manager, hosts_configuration_backup) -@pytest.fixture(scope='module', autouse=False) -def configure_environment_manager(get_host_manager): - def configure_host(host): - host_variables = host.get_vars() +def backup_configurations(host_manager): + backup_configurations = {} + for 
host in host_manager.get_group_hosts('all'): + host_variables = host_manager.get_host_variables(host) host_os = host_variables['os_name'] configuration_file_path = configuration_filepath_os[host_os] + current_configuration = host_manager.get_file_content(str(host), configuration_file_path) + backup_configurations[str(host)] = current_configuration + return backup_configurations + + +def restore_backup(host_manager, backup_configurations): + for host in host_manager.get_group_hosts('all'): + host_variables = host_manager.get_host_variables(host) + host_os = host_variables['os_name'] + configuration_file_path = configuration_filepath_os[host_os] + host_manager.modify_file_content(str(host), configuration_file_path, backup_configurations[str(host)]) + + +def load_vulnerability_detector_configurations(): + return { + 'agent': load_configuration_template(configurations_paths['agent'], [{}], [{}]), + 'manager': load_configuration_template(configurations_paths['manager'], [{}], [{}]) + } + +def configure_environment_manager(host_manager, configurations): + def configure_host(host, host_configuration_role): + host_os = host_manager.get_host_variables(host)['os_name'] + configuration_file_path = configuration_filepath_os[host_os] + host_groups = host_manager.get_host_groups(host) host_configuration = None - host_groups = [str(group) for group in host.get_groups()] if 'manager' in host_groups: - host_configuration = configurations_paths['manager'] + host_configuration = host_configuration_role['manager'] elif 'agent' in host_groups: - host_configuration = configurations_paths['agent'] + host_configuration = host_configuration_role['agent'] current_configuration = host_manager.get_file_content(str(host), configuration_file_path) - backup_configurations[host] = current_configuration - new_configuration_template = load_configuration_template(host_configuration, [{}], [{}]) - new_configuration = set_section_wazuh_conf(new_configuration_template[0].get('sections'), 
current_configuration.split("\n")) + new_configuration = set_section_wazuh_conf(host_configuration[0].get('sections'), current_configuration.split("\n")) + new_configuration = [line for line in new_configuration if line.strip() != ""] dom = xml.dom.minidom.parseString(''.join(new_configuration)) new_configuration = "\n".join(dom.toprettyxml().split("\n")[1:]) host_manager.modify_file_content(str(host), configuration_file_path, new_configuration) - backup_configurations = {} + loader = DataLoader() + configure_environment_parallel_map = [ (host, configurations) for host in host_manager.get_group_hosts('all')] - host_manager, inventory = get_host_manager + with ThreadPool() as pool: + pool.starmap(configure_host, configure_environment_parallel_map) - loader = DataLoader() - inventory_manager = InventoryManager(loader=loader, sources=inventory) - all_hosts = inventory_manager.get_hosts() +def control_environment(host_manager, operation, group_list): + for group in group_list: + for host in host_manager.get_group_hosts(group): + host_manager.handle_wazuh_services(host, operation) - with ThreadPool() as pool: - pool.map(configure_host, all_hosts) +def get_event_regex(event, operation_data=None): + """ + """ + regexes = {} + with open(regex_path, 'r') as regex_file: + regexes = yaml.load(regex_file, Loader=yaml.FullLoader) - yield + expected_event = regexes[event['event']] + expected_regex = expected_event['regex'] - for host in all_hosts: - host_variables = host.get_vars() - host_os = host_variables['os_name'] - configuration_file_path = configuration_filepath_os[host_os] + if 'parameters' in expected_event and not 'parameters' in event: + raise Exception(f"Not provided enaugh data to create regex. 
Missing {event['PARAMETERS']}") + elif 'parameters' in event: + for parameter in expected_event['parameters']: + expected_regex = expected_regex.replace(parameter, event['parameters'][parameter]) - host_manager.modify_file_content(str(host), configuration_file_path, backup_configurations[host]) -complete_list = [ (case['preconditions'], case['body'], case['teardown']) for case in cases] -list_ids = [ case['id'] for case in cases] + return expected_regex + + +@pytest.fixture(scope='module') +def host_manager(request): + inventory_path = request.config.getoption('--inventory-path') + manager = HostManager(inventory_path) + + return manager +def truncate_agents_logs(host_manager): + for agent in host_manager.get_group_hosts('agent'): + host_os_name = host_manager.get_host_variables(agent)['os_name'] + host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) -def launch_remote_operation(host, operation, operation_data, hm, inventory): +def truncate_managers_logs(host_manager): + for agent in host_manager.get_group_hosts('manager'): + host_os_name = host_manager.get_host_variables(agent)['os_name'] + host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) + +def truncate_logs(host_manager): + # for manager in host_manager.get_group_hosts('manager'): + # host_manager.truncate_file(manager, '/var/ossec/logs/alerts/alerts.json') + truncate_managers_logs(host_manager) + truncate_agents_logs(host_manager) + + +def wait_until_vd_is_updated(host_manager): + monitoring_data = {} + for manager in host_manager.get_group_hosts('manager'): + monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Starting vulnerability scan', 600) + + monitoring_events(host_manager, monitoring_data) + + +def wait_until_vuln_scan_finished(host_manager): + monitoring_data = {} + for manager in host_manager.get_group_hosts('manager'): + monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Vulnerability scan finished.', 600) + + 
monitoring_events(host_manager, monitoring_data) + + +def launch_remote_operation(host, operation, operation_data, host_manager): print(f"Operation {operation} in {host} with {operation_data}") - host_os_name = hm.get_host_variables(host)['os'].split('_')[0] - host_os_arch = hm.get_host_variables(host)['arch'] + host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(host)['arch'] + + system = host_manager.get_host_variables(host)['os_name'] + if system == 'linux': + system = host_manager.get_host_variables(host)['os'].split('_')[0] + + if operation == 'install_package': package_data = operation_data['package'] package_url = package_data[host_os_name][host_os_arch] - package_manager = packages_manager[host_os_name] - print(f"Install package {host} {package_url} {package_manager}") - hm.install_package(host, package_url, package_manager ) + host_manager.install_package(host, package_url, system ) elif operation == 'remove_package': package_data = operation_data['package'] package_name = package_data[host_os_name] - package_manager = packages_manager[host_os_name] - hm.remove_package(host, package_name, package_manager ) + host_manager.remove_package(host, package_name, system) -def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager, inventory): +def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager): if task_list: for task in task_list: task_keys = list(task.keys()) task_values = list(task.values()) operation, operation_data = task_keys[0], task_values[0] - launch_remote_operation(agent, operation, operation_data, host_manager, inventory) + launch_remote_operation(agent, operation, operation_data, host_manager) -def launch_parallel_operations(task_list, host_manager, inventory, group='agent'): +def launch_parallel_operations(task_list, host_manager, group='agent'): agents = host_manager.get_group_hosts('agent') - parallel_configuration = [(agent, 
task_list, host_manager, inventory) for agent in agents] + parallel_configuration = [(agent, task_list, host_manager) for agent in agents] with ThreadPool() as pool: # Use the pool to map the function to the list of hosts pool.starmap(launch_remote_sequential_operation_on_agent, parallel_configuration) @pytest.fixture(scope='function') -def setup(preconditions, teardown, get_host_manager): - hm, inventory = get_host_manager +def setup(preconditions, teardown, host_manager): + host_manager = host_manager if preconditions: - launch_parallel_operations(preconditions['tasks'], hm, inventory) + launch_parallel_operations(preconditions['tasks'], host_manager) if 'check_alerts' in preconditions: monitoring_data = {} - for agent in hm.get_group_hosts('agent'): - host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] + for agent in host_manager.get_group_hosts('agent'): + host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] check_alerts_data = preconditions['check_alerts'][host_os_name] - for event in check_alerts_data: - if not hm.get_host_variables(agent)['manager'] in monitoring_data: - monitoring_data[hm.get_host_variables(agent)['manager']] = [] - check_alerts_data['parameters']['HOST_NAME'] = agent - - regex = get_event_regex(preconditions['check_alerts'][host_os_name]) - monitoring_data[hm.get_host_variables('manager')] = [{ - 'regex': regex, - 'path': '/var/ossec/logs/alerts/alerts.json', - 'timeout': 30 - }] - - - for agent in hm.get_group_hosts('agent'): - host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] - - for event in metadata_agent: - event['parameters']['HOST_NAME'] = agent - monitoring_element = { - 'regex': get_event_regex(event), - 'path': '/var/ossec/logs/alerts/alerts.json', - 'timeout': 120 - } - - if 'parameters' in metadata_agent: - monitoring_element['parameters'] = metadata_agent['parameters'] - - monitoring_data[hm.get_host_variables(agent)['manager']].append(monitoring_element) - + for event in 
check_alerts_data: + if not host_manager.get_host_variables(agent)['manager'] in monitoring_data: + monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] + if not 'parameters' in event: + event['parameters'] = {} + event['parameters']['HOST_NAME'] = agent + regex = get_event_regex(event) + monitoring_element = { + 'regex': regex, + 'path': '/var/ossec/logs/alerts/alerts.json', + 'timeout': 30, + 'parameters': event['parameters'] + } + monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - monitoring_events(get_host_manager, monitoring_data) + monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) + monitoring_events(host_manager, monitoring_data) yield if teardown: - launch_parallel_operations(teardown, hm, inventory) + launch_parallel_operations(teardown, host_manager) - # for host in hm.get_group_hosts('manager'): - # hm.modify_file_content(host, path='/var/ossec/logs/alerts/alerts.json', content='') + for host in host_manager.get_group_hosts('manager'): + host_manager.truncate_file(host, '/var/ossec/logs/alerts/alerts.json') def create_temp_file(content): @@ -241,9 +289,9 @@ def create_temp_file(content): return temp_file_path -def monitoring_events(get_host_manager, monitoring_data): - hm, inventory = get_host_manager +def monitoring_events(host_manager, monitoring_data): monitoring_file_content = '' + results = {} for host, data in monitoring_data.items(): monitoring_file_content += f"{host}:\n" @@ -252,87 +300,189 @@ def monitoring_events(get_host_manager, monitoring_data): monitoring_file_content += f" path: '{monitoring_event['path']}'\n" monitoring_file_content += f" timeout: {monitoring_event['timeout']}\n" - temp_file = create_temp_file(monitoring_file_content) + temp_file = create_temp_file(monitoring_file_content) + try: + results.update(HostMonitor(inventory_path=host_manager.get_inventory_path(), messages_path=temp_file, tmp_path=tmp_path).run()) + 
except TimeoutError: + pass - HostMonitor(inventory_path=inventory, messages_path=temp_file, tmp_path=tmp_path).run() + os.remove(temp_file) - os.remove(temp_file) + return results -@pytest.mark.dependency() -def test_syscollector_first_scan(get_host_manager): - """ - """ - hm, inventory = get_host_manager - regex_info = { - 'event': 'syscollector_first_scan_start' - } - +def generate_monitoring_logs_all_agent(host_manager, regex_list, timeout_list): monitoring_data = {} - regex = get_event_regex(regex_info) - for agent in hm.get_group_hosts('agent'): - host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] - monitoring_data[agent] = [{ - 'regex': regex, - 'path': logs_filepath_os['linux'], - 'timeout': 120 - }] + for agent in host_manager.get_group_hosts('agent'): + monitoring_data[agent] = [] + for index, regex_index in enumerate(regex_list): + os_name = host_manager.get_host_variables(agent)['os_name'] + monitoring_data[agent].append({ + 'regex': regex_index, + 'path': logs_filepath_os[os_name], + 'timeout': timeout_list[index] - monitoring_events(get_host_manager, monitoring_data) + }) + print(monitoring_data) + return monitoring_data -@pytest.mark.dependency(depends=["test_syscollector_first_scan"]) -def test_syscollector_second_scan(get_host_manager): - """ - """ - hm, inventory = get_host_manager + +def generate_monitoring_logs_manager(host_manager, manager, regex, timeout): monitoring_data = {} - regex_info = { - 'event': 'syscollector_first_scan_start' - } - regex = get_event_regex(regex_info) - for agent in hm.get_group_hosts('agent'): - host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] - monitoring_data[agent] = [{ - 'regex': regex, - 'path': logs_filepath_os['linux'], - 'timeout': 120 - }] + os_name = host_manager.get_host_variables(manager)['os_name'] + monitoring_data[manager] = [{ + 'regex': regex, + 'path': logs_filepath_os[os_name], + 'timeout': timeout - monitoring_events(get_host_manager, monitoring_data) + }] + return 
monitoring_data -# @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) -@pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) -def test_vulnerability_detector_scans(preconditions, body, teardown, setup, get_host_manager): - """ - """ - hm, inventory = get_host_manager - launch_parallel_operations(body['tasks'], hm, inventory) - metadata = body['check_alerts'] + +def generate_monitoring_alerts_all_agent(host_manager, events_metadata): monitoring_data = {} - for agent in hm.get_group_hosts('agent'): - host_os_name = hm.get_host_variables(agent)['os'].split('_')[0] - metadata_agent = metadata[host_os_name] - if not hm.get_host_variables(agent)['manager'] in monitoring_data: - monitoring_data[hm.get_host_variables(agent)['manager']] = [] - for event in metadata_agent: + for agent in host_manager.get_group_hosts('agent'): + host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] + metadata_agent = events_metadata[host_os_name] + + if not host_manager.get_host_variables(agent)['manager'] in monitoring_data: + monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] + + for event in metadata_agent[agent.get_host_variables(agent)['arch']]: event['parameters']['HOST_NAME'] = agent monitoring_element = { 'regex': get_event_regex(event), 'path': '/var/ossec/logs/alerts/alerts.json', - 'timeout': 120 + 'timeout': 120, } if 'parameters' in metadata_agent: monitoring_element['parameters'] = metadata_agent['parameters'] - monitoring_data[hm.get_host_variables(agent)['manager']].append(monitoring_element) + monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - print(monitoring_data) +def get_master_ip(host_manager): + for manager in host_manager.get_group_hosts('manager'): + if host_manager.get_host_variables(manager)['type'] == 'master': + return host_manager.get_host_variables(manager)['ip'] + + +def get_state_index(host_manager): + url = 
f"https://{get_master_ip(host_manager)}:9200/{STATE_INDEX_NAME}_search?" + + response = requests.get(url=url, params={'pretty': 'true'}, json=query, verify=False, + auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password'])) + + return response.text + + +def get_agents_id(host_manager): + API_PROTOCOL = 'https' + API_HOST = get_master_ip(host_manager) + API_PORT = '55000' + API_USER = 'wazuh' + API_PASS = 'wazuh' + API_LOGIN_ENDPOINT = '/security/user/authenticate' + + response_token = get_token_login_api(API_PROTOCOL, API_HOST, API_PORT, API_USER, API_PASS, API_LOGIN_ENDPOINT, + timeout=10, login_attempts=3, sleep_time=1) + + agent_output = make_api_call(get_master_ip(host_manager), endpoint='/agents', token=response_token).json() + agents_ids = {} + for agent in agent_output['data']['affected_items']: + agents_ids[agent['name']] = agent['id'] + + return agents_ids + + +def get_agents_vulnerabilities(host_manager): + API_PROTOCOL = 'https' + API_HOST = get_master_ip(host_manager) + API_PORT = '55000' + API_USER = 'wazuh' + API_PASS = 'wazuh' + API_LOGIN_ENDPOINT = '/security/user/authenticate' + + response_token = get_token_login_api(API_PROTOCOL, API_HOST, API_PORT, API_USER, API_PASS, API_LOGIN_ENDPOINT, + timeout=10, login_attempts=3, sleep_time=1) + + agents_ids = get_agents_id(host_manager) + agents_vuln = {} + for agent in host_manager.get_group_hosts('agent'): + agents_vuln[agent] = make_api_call(get_master_ip(host_manager), endpoint=f"/vulnerability/{agents_ids[agent]}", token=response_token).json()['data']['affected_items'] + + return agents_vuln + +@pytest.mark.dependency() +def test_syscollector_initial_scans(host_manager): + # The Agent's syscollector scan is run + monitoring_data = generate_monitoring_logs_all_agent(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [20, 20]) + + results = monitoring_events(host_manager, monitoring_data) + + assert 
all(results.values()), f"Expected message was not triggered for some agents, {results}" + + truncate_agents_logs(host_manager) + + wait_until_vuln_scan_finished(host_manager) + + # Check vulnerabilities for agent + agents_vuln_before_second_scan = get_agents_vulnerabilities(host_manager) + for agent, vuln in agents_vuln_before_second_scan.items(): + assert vuln, f"No vulnerabilities were detected for agent {agent}" + + # Check Agent's System states are stored + state_index_content_before_second_scan = get_state_index(host_manager) + + # Compare agents_vuln_before_second_scan with state_index_content + # To Do + + # The Agent's syscollector scan is run + monitoring_data = generate_monitoring_logs_all_agent(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [60, 60]) + + results = monitoring_events(host_manager, monitoring_data) + + assert all(results.values()), f"Expected message was not triggered for some agents, {results}" + + truncate_managers_logs(host_manager) + + wait_until_vuln_scan_finished(host_manager) + + agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) + + + assert agents_vuln_before_second_scan == agents_vuln_after_second_scan + + # Check Agent's System states are stored + state_index_content_after_second_scan = get_state_index(host_manager) + + assert state_index_content_after_second_scan == state_index_content_before_second_scan + + +# @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) +@pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) +def test_vulnerability_detector_scans(preconditions, body, teardown, setup, host_manager): + # Launch tests tasks + launch_parallel_operations(body['tasks'], host_manager) + + # Check vulnerability + agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) + # Check alert in Wazuh Indexer + # monitoring_data = 
generate_monitoring_alerts_all_agent(host_manager, body['check_alerts']) + expected_alerts = body['check_agent_alert_indexer'] + # Check agent System state - monitoring_events(get_host_manager, monitoring_data) + results = monitoring_events(host_manager, monitoring_data) + assert all(results.values()), f"Expected message was not triggered for some agents, {results}" From b9f1101992dcb7ecc7b67f9f8a72c322536b1ab3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 15 Nov 2023 16:59:55 +0000 Subject: [PATCH 006/174] feat: create modules for E2E Vuln operations --- .../wazuh_testing/end_to_end/__init__.py | 11 + .../wazuh_testing/end_to_end/cases_handler.py | 0 .../wazuh_testing/end_to_end/configuration.py | 55 +++ .../wazuh_testing/end_to_end/indexer_api.py | 16 + .../wazuh_testing/end_to_end/logs.py | 18 + .../wazuh_testing/end_to_end/monitoring.py | 85 ++++ .../wazuh_testing/end_to_end/regex.py | 41 ++ .../end_to_end/regexes}/regex.yaml | 0 .../end_to_end/remote_operations_handler.py | 90 ++++ .../wazuh_testing/end_to_end/services.py | 4 + .../wazuh_testing/end_to_end/waiters.py | 19 + .../wazuh_testing/end_to_end/wazuh_api.py | 118 +++++ .../vulnerability_detector/event_monitor.py | 2 +- .../wazuh_testing/wazuh_testing/tools/file.py | 7 + .../wazuh_testing/tools/system.py | 10 + provisioning/roles/createInventory.yaml | 2 +- .../test_vulnerability_detector/cases.yaml | 110 ++--- .../test_vulnerability_detector/conftest.py | 56 +++ .../test_vulnerability_detector/test_scans.py | 438 +++--------------- 19 files changed, 652 insertions(+), 430 deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/cases_handler.py create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/logs.py create mode 100644 
deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/regex.py rename {tests/end_to_end/test_vulnerability_detector/data => deps/wazuh_testing/wazuh_testing/end_to_end/regexes}/regex.yaml (100%) create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/services.py create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py create mode 100644 tests/end_to_end/test_vulnerability_detector/conftest.py diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py b/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py index cafbf7f4ee..df24fe5835 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py @@ -11,6 +11,17 @@ fetched_alerts_json_path = os.path.join(gettempdir(), 'alerts.json') +configuration_filepath_os = { + 'linux': '/var/ossec/etc/ossec.conf', + 'windows': r'C:\\Program Files (x86)\\ossec-agent\\ossec.conf', + 'macos': '/Library/Ossec/etc/ossec.conf' +} +logs_filepath_os = { + 'linux': '/var/ossec/logs/ossec.log', + 'windows': r'C:\\Program Files (x86)\\ossec-agent\\ossec.log', + 'macos': '/Library/Ossec/logs/ossec.log' +} + @retry(Exception, attempts=3, delay=5) def get_alert_indexer_api(query, credentials, ip_address, index='wazuh-alerts-4.x-*'): diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/cases_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/cases_handler.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py new file mode 100644 index 0000000000..a1869d8048 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -0,0 +1,55 @@ +from 
multiprocessing.pool import ThreadPool +import xml.dom.minidom +from ansible.parsing.dataloader import DataLoader + +from wazuh_testing.end_to_end import configuration_filepath_os +from wazuh_testing.tools.configuration import set_section_wazuh_conf + + +# Configuration methods +def backup_configurations(host_manager): + backup_configurations = {} + for host in host_manager.get_group_hosts('all'): + host_variables = host_manager.get_host_variables(host) + host_os = host_variables['os_name'] + configuration_file_path = configuration_filepath_os[host_os] + current_configuration = host_manager.get_file_content(str(host), configuration_file_path) + backup_configurations[str(host)] = current_configuration + return backup_configurations + + +def restore_backup(host_manager, backup_configurations): + for host in host_manager.get_group_hosts('all'): + host_variables = host_manager.get_host_variables(host) + host_os = host_variables['os_name'] + configuration_file_path = configuration_filepath_os[host_os] + host_manager.modify_file_content(str(host), configuration_file_path, backup_configurations[str(host)]) + + +def configure_environment(host_manager, configurations): + def configure_host(host, host_configuration_role): + host_os = host_manager.get_host_variables(host)['os_name'] + configuration_file_path = configuration_filepath_os[host_os] + + host_groups = host_manager.get_host_groups(host) + host_configuration = None + if 'manager' in host_groups: + host_configuration = host_configuration_role['manager'] + elif 'agent' in host_groups: + host_configuration = host_configuration_role['agent'] + + current_configuration = host_manager.get_file_content(str(host), configuration_file_path) + new_configuration = set_section_wazuh_conf(host_configuration[0].get('sections'), current_configuration.split("\n")) + + new_configuration = [line for line in new_configuration if line.strip() != ""] + dom = xml.dom.minidom.parseString(''.join(new_configuration)) + new_configuration = 
"\n".join(dom.toprettyxml().split("\n")[1:]) + + host_manager.modify_file_content(str(host), configuration_file_path, new_configuration) + + + loader = DataLoader() + configure_environment_parallel_map = [ (host, configurations) for host in host_manager.get_group_hosts('all')] + + with ThreadPool() as pool: + pool.starmap(configure_host, configure_environment_parallel_map) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py new file mode 100644 index 0000000000..f6b0789849 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -0,0 +1,16 @@ +import requests + + +STATE_INDEX_NAME = 'wazuh-vulnerabilities-states' + +# Indexer API methods +def get_vuln_state_value(host_manager, credentials={'user': 'wazuh', 'password': 'wazuh'}): + url = f"https://{host_manager.get_master_ip(host_manager)}:9200/{STATE_INDEX_NAME}_search?" + query = { + "query": { + "match_all": {} + } + } + response = requests.get(url=url, params={'pretty': 'true'}, json=query, verify=False, + auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password'])) + return response.text diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py new file mode 100644 index 0000000000..716ed86a0a --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -0,0 +1,18 @@ +from wazuh_testing.end_to_end import logs_filepath_os + + +def truncate_agents_logs(host_manager): + for agent in host_manager.get_group_hosts('agent'): + host_os_name = host_manager.get_host_variables(agent)['os_name'] + host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) + +def truncate_managers_logs(host_manager): + for agent in host_manager.get_group_hosts('manager'): + host_os_name = host_manager.get_host_variables(agent)['os_name'] + host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) + +def 
truncate_logs(host_manager): + # for manager in host_manager.get_group_hosts('manager'): + # host_manager.truncate_file(manager, '/var/ossec/logs/alerts/alerts.json') + truncate_managers_logs(host_manager) + truncate_agents_logs(host_manager) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py new file mode 100644 index 0000000000..6f7eb409f5 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -0,0 +1,85 @@ +import os +import tempfile + +from wazuh_testing.end_to_end import logs_filepath_os +from wazuh_testing.tools.file import create_temp_file +from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.end_to_end.regex import get_event_regex + + +def monitoring_events(host_manager, monitoring_data): + monitoring_file_content = '' + results = {} + + for host, data in monitoring_data.items(): + monitoring_file_content += f"{host}:\n" + for monitoring_event in data: + string_limiter = "'" if '"' in monitoring_event.get("regex", "") else '"' + print(f"String limiter {string_limiter}") + monitoring_file_content += f' - regex: {string_limiter}{monitoring_event.get("regex", "")}{string_limiter}\n' + monitoring_file_content += f' path: {string_limiter}{monitoring_event.get("path", "")}{string_limiter}\n' + monitoring_file_content += f' timeout: {monitoring_event.get("timeout", 0)}\n' + + temp_file = create_temp_file(monitoring_file_content) + try: + temporal_directory = tempfile.TemporaryDirectory() + print(temporal_directory.name) + results.update(HostMonitor(inventory_path=host_manager.get_inventory_path(), messages_path=temp_file, tmp_path=temporal_directory.name).run()) + except TimeoutError: + pass + + os.remove(temp_file) + + return results + + +def generate_monitoring_logs_all_agent(host_manager, regex_list, timeout_list): + monitoring_data = {} + for agent in host_manager.get_group_hosts('agent'): + monitoring_data[agent] = [] + for index, 
regex_index in enumerate(regex_list): + os_name = host_manager.get_host_variables(agent)['os_name'] + monitoring_data[agent].append({ + 'regex': regex_index, + 'path': logs_filepath_os[os_name], + 'timeout': timeout_list[index] + + }) + return monitoring_data + + +def generate_monitoring_logs_manager(host_manager, manager, regex, timeout): + monitoring_data = {} + os_name = host_manager.get_host_variables(manager)['os_name'] + monitoring_data[manager] = [{ + 'regex': regex, + 'path': logs_filepath_os[os_name], + 'timeout': timeout + + }] + + return monitoring_data + + +def generate_monitoring_alerts_all_agent(host_manager, events_metadata): + monitoring_data = {} + + for agent in host_manager.get_group_hosts('agent'): + host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] + metadata_agent = events_metadata[host_os_name] + + if not host_manager.get_host_variables(agent)['manager'] in monitoring_data: + monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] + + for event in metadata_agent[agent.get_host_variables(agent)['arch']]: + event['parameters']['HOST_NAME'] = agent + monitoring_element = { + 'regex': get_event_regex(event), + 'path': '/var/ossec/logs/alerts/alerts.json', + 'timeout': 120, + } + + if 'parameters' in metadata_agent: + monitoring_element['parameters'] = metadata_agent['parameters'] + + monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) \ No newline at end of file diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py new file mode 100644 index 0000000000..f25037fdfd --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -0,0 +1,41 @@ + +regex = { + 'syscollector_scan_start': { + 'regex': '.*INFO: Starting evaluation.' + }, + 'syscollector_scan_end': { + 'regex': '.*INFO: Starting evaluation.' 
+ }, + 'syscollector_install_package_alert_yum': { + 'regex': '.*installed.*agent".*"name":"(\S+)".*Installed: (\S+).*?(\S+)', + 'parameters': ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] + }, + 'syscollector_install_package_alert_apt': { + 'regex': '.*New dpkg \(Debian Package\) installed.*.*agent".*"name":"(\S+).*package":"(\S+)","arch":"amd64","version":"(\S+)"', + 'parameters': ['HOST_NAME', 'PACKAGE_NAME', 'PACKAGE_VERSION'] + }, + 'syscollector_upgrade_package_alert_yum': { + 'regex': '.*Yum package updated.*agent".*"name":"(\S+)".*Updated: (\S+).*?(\S+)', + 'parameters': ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] + }, + 'vulnerability_alert':{ + 'regex': '.*HOST_NAME.*package:.*name":"PACKAGE_NAME".*version":"PACKAGE_VERSION".*"architecture":"ARCHITECTURE.*"cve":"CVE"', + 'parameters': ['HOST_NAME', 'CVE', 'PACKAGE_NAME', 'PACKAGE_VERSION', 'ARCHITECTURE'] + } +} + + +def get_event_regex(event): + """ + """ + expected_event = regex[event['event']] + expected_regex = expected_event['regex'] + + if 'parameters' in expected_event and not 'parameters' in event: + raise Exception(f"Not provided enaugh data to create regex. 
Missing {event['PARAMETERS']}") + elif 'parameters' in event: + for parameter in expected_event['parameters']: + expected_regex = expected_regex.replace(parameter, event['parameters'][parameter]) + + + return expected_regex diff --git a/tests/end_to_end/test_vulnerability_detector/data/regex.yaml b/deps/wazuh_testing/wazuh_testing/end_to_end/regexes/regex.yaml similarity index 100% rename from tests/end_to_end/test_vulnerability_detector/data/regex.yaml rename to deps/wazuh_testing/wazuh_testing/end_to_end/regexes/regex.yaml diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py new file mode 100644 index 0000000000..f0b953f40e --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -0,0 +1,90 @@ +from wazuh_testing.end_to_end.regex import get_event_regex +from wazuh_testing.end_to_end.monitoring import monitoring_events +from multiprocessing.pool import ThreadPool + + +def launch_remote_operation(host, operation, operation_data, host_manager): + host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(host)['arch'] + + system = host_manager.get_host_variables(host)['os_name'] + if system == 'linux': + system = host_manager.get_host_variables(host)['os'].split('_')[0] + + + if operation == 'install_package': + package_data = operation_data['package'] + package_url = package_data[host_os_name][host_os_arch] + host_manager.install_package(host, package_url, system) + + elif operation == 'remove_package': + package_data = operation_data['package'] + package_name = package_data[host_os_name] + host_manager.remove_package(host, package_name, system) + + elif operation == 'check_agent_vulnerability': + if operation_data['parameters']['alert_indexed']: + check_vuln_indexer(host_manager, operation_data['vulnerability_data']) + if 
operation_data['parameters']['alert']: + check_vuln_alert(host_manager, operation_data['vulnerability_data']) + if operation_data['parameters']['api']: + check_vuln_alert_api(host_manager, operation_data['vulnerability_data']) + if operation_data['parameters']['state_indice']: + check_vuln_state_index(host_manager, operation_data['vulnerability_data']) + + +def check_vuln_state_index(host_manager, vulnerability_data): + pass + +def check_vuln_indexer(host_manager, vulnerability_data): + pass + +def check_vuln_alert_api(host_manager, vulnerability_data): + pass + +def check_vuln_alert(host_manager, vulnerability_data): + monitoring_data = {} + + for agent in host_manager.get_group_hosts('agent'): + host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(agent)['arch'] + + agent_vulnerability_data_parameters = vulnerability_data[host_os_name][host_os_arch] + agent_vulnerability_data_parameters['HOST_NAME'] = agent + + agent_vulnerability_data = { + 'event': 'vulnerability_alert', + 'parameters': agent_vulnerability_data_parameters + } + + regex = get_event_regex(agent_vulnerability_data) + + monitoring_element = { + 'regex': regex, + 'path': '/var/ossec/logs/alerts/alerts.json', + 'timeout': 30, + } + + if host_manager.get_host_variables(agent)['manager'] not in monitoring_data: + monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] + + monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) + + monitoring_events(host_manager, monitoring_data) + + +def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager): + if task_list: + for task in task_list: + task_keys = list(task.keys()) + task_values = list(task.values()) + operation, operation_data = task_keys[0], task_values[0] + launch_remote_operation(agent, operation, operation_data, host_manager) + + +def launch_parallel_operations(task_list, host_manager, group='agent'): + 
agents = host_manager.get_group_hosts('agent') + parallel_configuration = [(agent, task_list, host_manager) for agent in agents] + with ThreadPool() as pool: + # Use the pool to map the function to the list of hosts + pool.starmap(launch_remote_sequential_operation_on_agent, parallel_configuration) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/services.py b/deps/wazuh_testing/wazuh_testing/end_to_end/services.py new file mode 100644 index 0000000000..aa0e4182e4 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/services.py @@ -0,0 +1,4 @@ +def control_environment(host_manager, operation, group_list): + for group in group_list: + for host in host_manager.get_group_hosts(group): + host_manager.handle_wazuh_services(host, operation) \ No newline at end of file diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py new file mode 100644 index 0000000000..c56ccbc410 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -0,0 +1,19 @@ +from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_manager, monitoring_events +from wazuh_testing.end_to_end.wazuh_api import get_agents_id + + + +def wait_until_vd_is_updated(host_manager): + monitoring_data = {} + for manager in host_manager.get_group_hosts('manager'): + monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Starting vulnerability scan', 600) + + monitoring_events(host_manager, monitoring_data) + + +def wait_until_vuln_scan_agents_finished(host_manager): + for agent in host_manager.get_group_hosts('agent'): + manager_host = host_manager.get_host_variables(agent)['manager'] + agents_id = get_agents_id(host_manager) + monitoring_data = generate_monitoring_logs_manager(host_manager, manager_host,rf"Finished vulnerability assessment for agent '{agents_id[agent]}'", 30) + monitoring_events(host_manager, monitoring_data) diff --git 
a/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py new file mode 100644 index 0000000000..8cc29ea6c4 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py @@ -0,0 +1,118 @@ +""" +Wazuh API Operations Module + +This module provides functions for handling Wazuh API operations in a HostManager environment. +It includes methods for retrieving API parameters, obtaining authentication tokens, +and retrieving information about Wazuh agents and their vulnerabilities. + +Functions: +- get_api_parameters(host_manager): Retrieves Wazuh API parameters. +- get_api_token(host_manager): Retrieves the API token for authentication. +- get_agents_id(host_manager): Retrieves the IDs of Wazuh agents. +- get_agents_vulnerabilities(host_manager): Retrieves vulnerability information for Wazuh agents. + +Parameters: +- host_manager (HostManager): An instance of the HostManager class representing the Wazuh environment. + +Usage Example: +```python +from host_manager_module import HostManager +from wazuh_api_module import get_api_parameters, get_api_token, get_agents_id, get_agents_vulnerabilities + +# Create an instance of HostManager +host_manager = HostManager() + +# Retrieve API parameters +api_params = get_api_parameters(host_manager) + +# Obtain API token +api_token = get_api_token(host_manager) + +# Retrieve Wazuh agents IDs +agents_ids = get_agents_id(host_manager) + +# Retrieve vulnerability information for agents +agents_vuln = get_agents_vulnerabilities(host_manager) + +Note: Make sure to replace 'host_manager_module' and 'wazuh_api_module' with the actual module names in your project. +""" +from wazuh_testing.api import make_api_call, get_token_login_api + +# Wazuh API Methods +def get_api_parameters(host_manager): + """ + Retrieves the Wazuh API parameters. + + Parameters: + - host_manager (HostManager): An instance of the HostManager class. 
+ + Returns: + dict: A dictionary containing Wazuh API parameters, including protocol, host, port, user, and password. + """ + + api_parameters = { + 'protocol': 'https', + 'host': host_manager.get_master_ip(), + 'port': '55000', + 'user': 'wazuh', + 'pass': 'wazuh' + } + return api_parameters + + +def get_api_token(host_manager): + """ + Retrieves the API token for authentication. + + Parameters: + - host_manager (HostManager): An instance of the HostManager class. + + Returns: + str: The API token for authentication. + """ + login_endpoint = '/security/user/authenticate' + api_parameters = get_api_parameters(host_manager) + response_token = get_token_login_api(api_parameters['protocol'], api_parameters['host'], api_parameters['port'], + api_parameters['user'], api_parameters['pass'], login_endpoint, + timeout=10, login_attempts=3, sleep_time=1) + return response_token + + +def get_agents_id(host_manager): + """ + Retrieves the IDs of Wazuh agents. + + Parameters: + - host_manager (HostManager): An instance of the HostManager class. + + Returns: + dict: A dictionary mapping agent names to their corresponding IDs. + """ + + api_token = get_api_token(host_manager) + agent_output = make_api_call(host_manager.get_master_ip(), endpoint='/agents', token=api_token).json() + agents_ids = {} + for agent in agent_output['data']['affected_items']: + agents_ids[agent['name']] = agent['id'] + + return agents_ids + + +def get_agents_vulnerabilities(host_manager): + """ + Retrieves vulnerability information for Wazuh agents. + + Parameters: + - host_manager (HostManager): An instance of the HostManager class. + + Returns: + dict: A dictionary mapping agent names to a list of their vulnerabilities. 
+ """ + api_token = get_api_token(host_manager) + agents_ids = get_agents_id(host_manager) + agents_vuln = {} + for agent in host_manager.get_group_hosts('agent'): + agents_vuln[agent] = make_api_call(host_manager.get_master_ip(), endpoint=f"/vulnerability/{agents_ids[agent]}", + token=api_token).json()['data']['affected_items'] + + return agents_vuln diff --git a/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/event_monitor.py b/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/event_monitor.py index bb3c76ea04..0a5c843593 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/event_monitor.py +++ b/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/event_monitor.py @@ -167,7 +167,7 @@ def check_vulnerability_full_scan_end(log_monitor=None, agent_id='000'): agent_id (str): Agent ID. """ check_vuln_detector_event(file_monitor=log_monitor, timeout=vd.T_40, - callback=f"Finished vulnerability assessment for agent '{agent_id}'", + callback=rf"Finished vulnerability assessment for agent \\'{agent_id}\\'", error_message='No full scan end has been detected in the log.') diff --git a/deps/wazuh_testing/wazuh_testing/tools/file.py b/deps/wazuh_testing/wazuh_testing/tools/file.py index d4bab4592f..8f41b4059f 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/file.py +++ b/deps/wazuh_testing/wazuh_testing/tools/file.py @@ -924,3 +924,10 @@ def modify_file(path, name, new_content=None, is_binary=False): modify_file_group(path, name) modify_file_permission(path, name) modify_file_win_attributes(path, name) + + +def create_temp_file(content): + fd, temp_file_path = tempfile.mkstemp(text=True) # 'text=True' specifies text mode + with open(temp_file_path, 'w', newline='\n') as temp_file: + temp_file.write(content) + return temp_file_path diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index f6e0fab106..18b2a11bf1 100644 --- 
a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -441,6 +441,16 @@ def install_package(self, host, url, system='ubuntu'): if 'rc' in a and a['rc'] == 0 and a['changed'] == True: result = True + def get_master_ip(self): + """ + + """ + master_ip = None + for manager in self.get_group_hosts('manager'): + if 'type' in self.get_host_variables(manager) and \ + self.get_host_variables(manager)['type'] == 'master': + master_ip = self.get_host_variables(manager)['ip'] + return master_ip def remove_package(self, host, package_name, system): result = False diff --git a/provisioning/roles/createInventory.yaml b/provisioning/roles/createInventory.yaml index c97f4cb6fb..63de2c2d7b 100644 --- a/provisioning/roles/createInventory.yaml +++ b/provisioning/roles/createInventory.yaml @@ -3,5 +3,5 @@ tasks: - name: Template a file ansible.builtin.template: - src: "{{ source_template }}" + src: "{{ source_template }}" dest: "{{ dest_inventory}}" diff --git a/tests/end_to_end/test_vulnerability_detector/cases.yaml b/tests/end_to_end/test_vulnerability_detector/cases.yaml index 2f548d2edf..7015054ef4 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases.yaml @@ -8,53 +8,55 @@ package: centos: amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm - arm: null + arm: https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.17-2PGDG.rhel7.aarch64.rpm ubuntu: amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb - arm: null + arm: https://grafana.com/grafana/download/8.5.5?edition=enterprise&platform=arm windows: amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe macos: amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - -# {"timestamp":"2023-11-14T10:52:41.932+0000","rule":{"level":10,"description":"CVE-2020-28924 affects 
rclone","id":"23505","firedtimes":392,"mail":false,"groups":["vulnerability-detector"],"gdpr":["IV_35.7.d"],"pci_dss":["11.2.1","11.2.3"],"tsc":["CC7.1","CC7.2"]},"agent":{"id":"002","name":"ip-172-31-12-122.ec2.internal","ip":"172.31.12.122"},"manager":{"name":"ip-172-31-6-24"},"id":"1699959161.19604197","cluster":{"name":"wazuh","node":"master"},"decoder":{"name":"json"},"data":{"vulnerability":{"package":{"name":"rclone","version":"1.49.5-1","architecture":"x86_64","condition":"Package less than 1.53.3"},"cvss":{"cvss2":{"vector":{"attack_vector":"network","access_complexity":"low","authentication":"none","confidentiality_impact":"partial","integrity_impact":"none","availability":"none"},"base_score":"5","exploitability_score":"10","impact_score":"2.900000"},"cvss3":{"vector":{"attack_vector":"network","access_complexity":"low","privileges_required":"none","user_interaction":"none","scope":"unchanged","confidentiality_impact":"high","integrity_impact":"none","availability":"none"},"base_score":"7.500000","exploitability_score":"3.900000","impact_score":"3.600000"}},"cve":"CVE-2020-28924","title":"CVE-2020-28924 affects rclone","rationale":"An issue was discovered in Rclone before 1.53.3. Due to the use of a weak random number generator, the password generator has been producing weak passwords with much less entropy than advertised. The suggested passwords depend deterministically on the time the second rclone was started. This limits the entropy of the passwords enormously. These passwords are often used in the crypt backend for encryption of data. It would be possible to make a dictionary of all possible passwords with about 38 million entries per password length. This would make decryption of secret material possible with a plausible amount of effort. 
NOTE: all passwords generated by affected versions should be changed.","severity":"High","published":"2020-11-19","updated":"2022-04-26","cwe_reference":"CWE-331","status":"Active","type":"PACKAGE","references":["https://github.com/rclone/rclone/issues/4783","https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/UJIFT24Q6EFXLQZ24AER2QGFFZLMIPCD/","https://rclone.org/downloads/","https://security.gentoo.org/glsa/202107-14","https://nvd.nist.gov/vuln/detail/CVE-2020-28924"],"assigner":"cve@mitre.org"}},"location":"vulnerability-detector"} - - check_agent_alert_indexer: - centos: - amd64: - - event: syscollector_install_package_alert_yum - parameters: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.49.5" - arm: - - event: syscollector_install_package_alert_yum - parameters: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.49.5" - ubuntu: - amd64: - - event: syscollector_install_package_alert_apt - parameters: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.49.5" - arm: - - event: syscollector_install_package_alert_apt - parameters: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.49.5" - windows: - amd64: - - event: syscollector_install_package_alert_apt - parameters: - PACKAGE_NAME: "VideoLAN" - PACKAGE_VERSION: "3.0.6" - macos: - amd64: - - event: syscollector_install_package_alert_apt - parameters: - PACKAGE_NAME: - PACKAGE_VERSION: - + - check_agent_vulnerability: + parameters: + alert_indexed: False + api: False + alert: True + state_indice: False + vulnerability_data: + centos: + amd64: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.49.5" + CVE: CVE-2020-28924 + ARCHITECTURE: 'x86_64' + arm: + PACKAGE_NAME: "postgresql11-libs" + PACKAGE_VERSION: "11.17.2" + CVE: CVE-2020-28924 + ARCHITECTURE: 'arm64' + ubuntu: + amd64: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.49.5" + CVE: CVE-2020-28924 + ARCHITECTURE: 'x86_64' + arm: + PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2020-28924 + ARCHITECTURE: 'arm64' + 
windows: + amd64: + PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.6" + CVE: CVE-2020-28924 + ARCHITECTURE: 'x86_64' + macos: + amd64: + PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.0.1" + CVE: CVE-2020-28924 + ARCHITECTURE: 'x86_64' # check_alerts: # centos: @@ -91,19 +93,19 @@ # parameters: # PACKAGE_NAME: # PACKAGE_VERSION: - teardown: - - remove_package: - package: - centos: - amd: rclone - arm: rclone - ubuntu: - amd: rclone - arm: rclone - windows: - amd64: C:\\\\Program Files\\\\VideoLAN\\\\VLC\\\\uninstall.exe - macos: - amd64: + # teardown: + # - remove_package: + # package: + # centos: + # amd: rclone + # arm: rclone + # ubuntu: + # amd: rclone + # arm: rclone + # windows: + # amd64: C:\\\\Program Files\\\\VideoLAN\\\\VLC\\\\uninstall.exe + # macos: + # amd64: # - case: "Updating a vulnerable package that remains vulnerable to the same CVE" # id: "update_vuln_package_remain_vulnerable" diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py new file mode 100644 index 0000000000..0d9d40c9f9 --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -0,0 +1,56 @@ +import pytest + +from wazuh_testing.tools.system import HostManager +from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations + +@pytest.fixture(scope='module') +def host_manager(request): + inventory_path = request.config.getoption('--inventory-path') + manager = HostManager(inventory_path) + + return manager + +@pytest.fixture(scope='function') +def setup(preconditions, teardown, host_manager): + """ + """ + host_manager = host_manager + + if preconditions: + launch_parallel_operations(preconditions['tasks'], host_manager) + + # if 'check_alerts' in preconditions: + # monitoring_data = {} + + # for agent in host_manager.get_group_hosts('agent'): + # host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] + # check_alerts_data = 
preconditions['check_alerts'][host_os_name] + + # for event in check_alerts_data: + # if not host_manager.get_host_variables(agent)['manager'] in monitoring_data: + # monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] + + # if not 'parameters' in event: + # event['parameters'] = {} + # event['parameters']['HOST_NAME'] = agent + + # regex = get_event_regex(event) + + # monitoring_element = { + # 'regex': regex, + # 'path': '/var/ossec/logs/alerts/alerts.json', + # 'timeout': 30, + # 'parameters': event['parameters'] + # } + # monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) + + # monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) + # monitoring_events(host_manager, monitoring_data) + + yield + + if teardown: + launch_parallel_operations(teardown, host_manager) + + for host in host_manager.get_group_hosts('manager'): + host_manager.truncate_file(host, '/var/ossec/logs/alerts/alerts.json') \ No newline at end of file diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index 1b32481e5b..b823a97f93 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -8,7 +8,6 @@ import pytest import ansible_runner -import xml.dom.minidom import yaml from ansible.inventory.manager import InventoryManager from ansible.parsing.dataloader import DataLoader @@ -19,49 +18,56 @@ from wazuh_testing.tools.monitoring import HostMonitor from wazuh_testing.tools.system import HostManager from wazuh_testing.api import make_api_call, get_token_login_api + from wazuh_testing.end_to_end import get_alert_indexer_api +from wazuh_testing.end_to_end.configuration import backup_configurations, restore_backup, configure_environment +from wazuh_testing.end_to_end.services import control_environment +from wazuh_testing.end_to_end.logs 
import truncate_agents_logs, truncate_managers_logs, truncate_logs +from wazuh_testing.end_to_end.wazuh_api import get_agents_id, get_agents_vulnerabilities +from wazuh_testing.end_to_end.indexer_api import get_vuln_state_value +from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated, wait_until_vuln_scan_agents_finished +from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events +from wazuh_testing.end_to_end.regex import get_event_regex +from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations current_dir = os.path.dirname(__file__) configurations_dir = os.path.join(current_dir, "data", "configurations") cases = {} local_path = os.path.dirname(os.path.abspath(__file__)) -tmp_path = os.path.join(local_path, 'tmp') -regex_path = os.path.join(current_dir, 'data', 'regex.yaml') -STATE_INDEX_NAME = 'agents_state_index' - - -with open(os.path.join(current_dir, 'cases.yaml'), 'r') as cases_file: - cases = yaml.load(cases_file, Loader=yaml.FullLoader) - configurations_paths = { 'manager': os.path.join(configurations_dir, 'manager.yaml'), 'agent': os.path.join(configurations_dir, 'agent.yaml') } - -configuration_filepath_os = { - 'linux': '/var/ossec/etc/ossec.conf', - 'windows': 'C:\Program Files (x86)\ossec-agent\ossec.conf', - 'macos': '/Library/Ossec/etc/ossec.conf' -} -logs_filepath_os = { - 'linux': '/var/ossec/logs/ossec.log', - 'windows': 'C:\Program Files (x86)\ossec-agent\ossec.log', - 'macos': '/Library/Ossec/logs/ossec.log' -} +with open(os.path.join(current_dir, 'cases.yaml'), 'r') as cases_file: + cases = yaml.load(cases_file, Loader=yaml.FullLoader) +complete_list = [ + ( + case['preconditions'] if 'preconditions' in case else None, + case['body'] if 'body' in case else None, + case['teardown'] if 'teardown' in case else None + ) + for case in cases +] -complete_list = [ (case['preconditions'], case['body'], case['teardown']) for case in cases] dependencies = [None if 
'depends' not in case else pytest.mark.depends(name=case['depend']) for case in cases] list_ids = [ case['id'] for case in cases] +def load_vulnerability_detector_configurations(): + return { + 'agent': load_configuration_template(configurations_paths['agent'], [{}], [{}]), + 'manager': load_configuration_template(configurations_paths['manager'], [{}], [{}]) + } + + @pytest.fixture(scope='module') def setup_vulnerability_tests(host_manager): # Configure managers and agents hosts_configuration_backup = backup_configurations(host_manager) - configure_environment_manager(host_manager, load_vulnerability_detector_configurations()) + configure_environment(host_manager, load_vulnerability_detector_configurations()) # Restart managers and stop agents control_environment(host_manager, 'stop', ['agent']) @@ -80,346 +86,19 @@ def setup_vulnerability_tests(host_manager): restore_backup(host_manager, hosts_configuration_backup) -def backup_configurations(host_manager): - backup_configurations = {} - for host in host_manager.get_group_hosts('all'): - host_variables = host_manager.get_host_variables(host) - host_os = host_variables['os_name'] - configuration_file_path = configuration_filepath_os[host_os] - current_configuration = host_manager.get_file_content(str(host), configuration_file_path) - backup_configurations[str(host)] = current_configuration - return backup_configurations - - -def restore_backup(host_manager, backup_configurations): - for host in host_manager.get_group_hosts('all'): - host_variables = host_manager.get_host_variables(host) - host_os = host_variables['os_name'] - configuration_file_path = configuration_filepath_os[host_os] - host_manager.modify_file_content(str(host), configuration_file_path, backup_configurations[str(host)]) - - -def load_vulnerability_detector_configurations(): - return { - 'agent': load_configuration_template(configurations_paths['agent'], [{}], [{}]), - 'manager': load_configuration_template(configurations_paths['manager'], [{}], 
[{}]) - } - - -def configure_environment_manager(host_manager, configurations): - def configure_host(host, host_configuration_role): - host_os = host_manager.get_host_variables(host)['os_name'] - configuration_file_path = configuration_filepath_os[host_os] - - host_groups = host_manager.get_host_groups(host) - host_configuration = None - if 'manager' in host_groups: - host_configuration = host_configuration_role['manager'] - elif 'agent' in host_groups: - host_configuration = host_configuration_role['agent'] - - current_configuration = host_manager.get_file_content(str(host), configuration_file_path) - new_configuration = set_section_wazuh_conf(host_configuration[0].get('sections'), current_configuration.split("\n")) - - new_configuration = [line for line in new_configuration if line.strip() != ""] - dom = xml.dom.minidom.parseString(''.join(new_configuration)) - new_configuration = "\n".join(dom.toprettyxml().split("\n")[1:]) - - host_manager.modify_file_content(str(host), configuration_file_path, new_configuration) - - - loader = DataLoader() - configure_environment_parallel_map = [ (host, configurations) for host in host_manager.get_group_hosts('all')] - with ThreadPool() as pool: - pool.starmap(configure_host, configure_environment_parallel_map) - - -def control_environment(host_manager, operation, group_list): - for group in group_list: - for host in host_manager.get_group_hosts(group): - host_manager.handle_wazuh_services(host, operation) - - -def get_event_regex(event, operation_data=None): - """ +def check_vuln_state_index(agents_vulnerabilities_index_value, agents_vulnerabilities_api_value): + """Check that agents vulnerabilities match with index state values """ - regexes = {} - with open(regex_path, 'r') as regex_file: - regexes = yaml.load(regex_file, Loader=yaml.FullLoader) - - expected_event = regexes[event['event']] - expected_regex = expected_event['regex'] - - if 'parameters' in expected_event and not 'parameters' in event: - raise Exception(f"Not 
provided enaugh data to create regex. Missing {event['PARAMETERS']}") - elif 'parameters' in event: - for parameter in expected_event['parameters']: - expected_regex = expected_regex.replace(parameter, event['parameters'][parameter]) - - - return expected_regex - - -@pytest.fixture(scope='module') -def host_manager(request): - inventory_path = request.config.getoption('--inventory-path') - manager = HostManager(inventory_path) - - return manager - -def truncate_agents_logs(host_manager): - for agent in host_manager.get_group_hosts('agent'): - host_os_name = host_manager.get_host_variables(agent)['os_name'] - host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) - -def truncate_managers_logs(host_manager): - for agent in host_manager.get_group_hosts('manager'): - host_os_name = host_manager.get_host_variables(agent)['os_name'] - host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) - -def truncate_logs(host_manager): - # for manager in host_manager.get_group_hosts('manager'): - # host_manager.truncate_file(manager, '/var/ossec/logs/alerts/alerts.json') - truncate_managers_logs(host_manager) - truncate_agents_logs(host_manager) - - -def wait_until_vd_is_updated(host_manager): - monitoring_data = {} - for manager in host_manager.get_group_hosts('manager'): - monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Starting vulnerability scan', 600) - - monitoring_events(host_manager, monitoring_data) - - -def wait_until_vuln_scan_finished(host_manager): - monitoring_data = {} - for manager in host_manager.get_group_hosts('manager'): - monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Vulnerability scan finished.', 600) - - monitoring_events(host_manager, monitoring_data) - - -def launch_remote_operation(host, operation, operation_data, host_manager): - print(f"Operation {operation} in {host} with {operation_data}") - host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] - 
host_os_arch = host_manager.get_host_variables(host)['arch'] - - system = host_manager.get_host_variables(host)['os_name'] - if system == 'linux': - system = host_manager.get_host_variables(host)['os'].split('_')[0] - - - if operation == 'install_package': - package_data = operation_data['package'] - package_url = package_data[host_os_name][host_os_arch] - host_manager.install_package(host, package_url, system ) - elif operation == 'remove_package': - package_data = operation_data['package'] - package_name = package_data[host_os_name] - host_manager.remove_package(host, package_name, system) - - -def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager): - if task_list: - for task in task_list: - task_keys = list(task.keys()) - task_values = list(task.values()) - operation, operation_data = task_keys[0], task_values[0] - launch_remote_operation(agent, operation, operation_data, host_manager) - - -def launch_parallel_operations(task_list, host_manager, group='agent'): - agents = host_manager.get_group_hosts('agent') - parallel_configuration = [(agent, task_list, host_manager) for agent in agents] - with ThreadPool() as pool: - # Use the pool to map the function to the list of hosts - pool.starmap(launch_remote_sequential_operation_on_agent, parallel_configuration) - - -@pytest.fixture(scope='function') -def setup(preconditions, teardown, host_manager): - host_manager = host_manager - - if preconditions: - launch_parallel_operations(preconditions['tasks'], host_manager) - - if 'check_alerts' in preconditions: - monitoring_data = {} - - for agent in host_manager.get_group_hosts('agent'): - host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - check_alerts_data = preconditions['check_alerts'][host_os_name] - - for event in check_alerts_data: - if not host_manager.get_host_variables(agent)['manager'] in monitoring_data: - monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] - - if not 'parameters' in event: 
- event['parameters'] = {} - event['parameters']['HOST_NAME'] = agent - - regex = get_event_regex(event) - - monitoring_element = { - 'regex': regex, - 'path': '/var/ossec/logs/alerts/alerts.json', - 'timeout': 30, - 'parameters': event['parameters'] - } - monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - - monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - monitoring_events(host_manager, monitoring_data) - - yield - - if teardown: - launch_parallel_operations(teardown, host_manager) - - for host in host_manager.get_group_hosts('manager'): - host_manager.truncate_file(host, '/var/ossec/logs/alerts/alerts.json') - - -def create_temp_file(content): - fd, temp_file_path = tempfile.mkstemp(text=True) # 'text=True' specifies text mode - with open(temp_file_path, 'w', newline='\n') as temp_file: - temp_file.write(content) - return temp_file_path - - -def monitoring_events(host_manager, monitoring_data): - monitoring_file_content = '' - results = {} - - for host, data in monitoring_data.items(): - monitoring_file_content += f"{host}:\n" - for monitoring_event in data: - monitoring_file_content += f" - regex: '{monitoring_event['regex']}'\n" - monitoring_file_content += f" path: '{monitoring_event['path']}'\n" - monitoring_file_content += f" timeout: {monitoring_event['timeout']}\n" - - temp_file = create_temp_file(monitoring_file_content) - try: - results.update(HostMonitor(inventory_path=host_manager.get_inventory_path(), messages_path=temp_file, tmp_path=tmp_path).run()) - except TimeoutError: - pass - - os.remove(temp_file) - - return results - - -def generate_monitoring_logs_all_agent(host_manager, regex_list, timeout_list): - monitoring_data = {} - for agent in host_manager.get_group_hosts('agent'): - monitoring_data[agent] = [] - for index, regex_index in enumerate(regex_list): - os_name = host_manager.get_host_variables(agent)['os_name'] - monitoring_data[agent].append({ - 
'regex': regex_index, - 'path': logs_filepath_os[os_name], - 'timeout': timeout_list[index] - - }) - - print(monitoring_data) - return monitoring_data - - -def generate_monitoring_logs_manager(host_manager, manager, regex, timeout): - monitoring_data = {} - os_name = host_manager.get_host_variables(manager)['os_name'] - monitoring_data[manager] = [{ - 'regex': regex, - 'path': logs_filepath_os[os_name], - 'timeout': timeout - - }] - - return monitoring_data - - -def generate_monitoring_alerts_all_agent(host_manager, events_metadata): - monitoring_data = {} - - for agent in host_manager.get_group_hosts('agent'): - host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - metadata_agent = events_metadata[host_os_name] - - if not host_manager.get_host_variables(agent)['manager'] in monitoring_data: - monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] - - for event in metadata_agent[agent.get_host_variables(agent)['arch']]: - event['parameters']['HOST_NAME'] = agent - monitoring_element = { - 'regex': get_event_regex(event), - 'path': '/var/ossec/logs/alerts/alerts.json', - 'timeout': 120, - } - - if 'parameters' in metadata_agent: - monitoring_element['parameters'] = metadata_agent['parameters'] - - monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - - -def get_master_ip(host_manager): - for manager in host_manager.get_group_hosts('manager'): - if host_manager.get_host_variables(manager)['type'] == 'master': - return host_manager.get_host_variables(manager)['ip'] - - -def get_state_index(host_manager): - url = f"https://{get_master_ip(host_manager)}:9200/{STATE_INDEX_NAME}_search?" 
- - response = requests.get(url=url, params={'pretty': 'true'}, json=query, verify=False, - auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password'])) - - return response.text - - -def get_agents_id(host_manager): - API_PROTOCOL = 'https' - API_HOST = get_master_ip(host_manager) - API_PORT = '55000' - API_USER = 'wazuh' - API_PASS = 'wazuh' - API_LOGIN_ENDPOINT = '/security/user/authenticate' - - response_token = get_token_login_api(API_PROTOCOL, API_HOST, API_PORT, API_USER, API_PASS, API_LOGIN_ENDPOINT, - timeout=10, login_attempts=3, sleep_time=1) - - agent_output = make_api_call(get_master_ip(host_manager), endpoint='/agents', token=response_token).json() - agents_ids = {} - for agent in agent_output['data']['affected_items']: - agents_ids[agent['name']] = agent['id'] - - return agents_ids - - -def get_agents_vulnerabilities(host_manager): - API_PROTOCOL = 'https' - API_HOST = get_master_ip(host_manager) - API_PORT = '55000' - API_USER = 'wazuh' - API_PASS = 'wazuh' - API_LOGIN_ENDPOINT = '/security/user/authenticate' - - response_token = get_token_login_api(API_PROTOCOL, API_HOST, API_PORT, API_USER, API_PASS, API_LOGIN_ENDPOINT, - timeout=10, login_attempts=3, sleep_time=1) - - agents_ids = get_agents_id(host_manager) - agents_vuln = {} - for agent in host_manager.get_group_hosts('agent'): - agents_vuln[agent] = make_api_call(get_master_ip(host_manager), endpoint=f"/vulnerability/{agents_ids[agent]}", token=response_token).json()['data']['affected_items'] + # To Do + pass - return agents_vuln @pytest.mark.dependency() -def test_syscollector_initial_scans(host_manager): - # The Agent's syscollector scan is run +def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): + """ + """ + # Monitor for the first Syscollector scan in all the agents monitoring_data = generate_monitoring_logs_all_agent(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], @@ 
-427,11 +106,13 @@ def test_syscollector_initial_scans(host_manager): results = monitoring_events(host_manager, monitoring_data) - assert all(results.values()), f"Expected message was not triggered for some agents, {results}" + assert all(results.values()), f"First Syscollector scan was not detected in some of the agents {results}" + # Truncate agents logs to detect second scan truncate_agents_logs(host_manager) - wait_until_vuln_scan_finished(host_manager) + # Wait until all agents has been scanned + wait_until_vuln_scan_agents_finished(host_manager) # Check vulnerabilities for agent agents_vuln_before_second_scan = get_agents_vulnerabilities(host_manager) @@ -439,12 +120,17 @@ def test_syscollector_initial_scans(host_manager): assert vuln, f"No vulnerabilities were detected for agent {agent}" # Check Agent's System states are stored - state_index_content_before_second_scan = get_state_index(host_manager) + # To Do + # state_index_content_before_second_scan = get_vuln_state_value(host_manager) # Compare agents_vuln_before_second_scan with state_index_content # To Do + # check_vuln_state_index(state_index_content_before_second_scan, agents_vuln_before_second_scan) - # The Agent's syscollector scan is run + # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly + truncate_managers_logs(host_manager) + + # The Agent's syscollector second scan is run monitoring_data = generate_monitoring_logs_all_agent(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], @@ -452,37 +138,41 @@ def test_syscollector_initial_scans(host_manager): results = monitoring_events(host_manager, monitoring_data) - assert all(results.values()), f"Expected message was not triggered for some agents, {results}" - - truncate_managers_logs(host_manager) + assert all(results.values()), f"Second Syscollector scan was not detected in some of the agents {results}" - 
wait_until_vuln_scan_finished(host_manager) + # WARNING + # Is possible that second scan will not produce expected Finished Scan in the agent. + # In that case search for another event or include a hardcoded timeout + wait_until_vuln_scan_agents_finished(host_manager) agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) - assert agents_vuln_before_second_scan == agents_vuln_after_second_scan # Check Agent's System states are stored - state_index_content_after_second_scan = get_state_index(host_manager) + # state_index_content_after_second_scan = get_vuln_state_value(host_manager) - assert state_index_content_after_second_scan == state_index_content_before_second_scan + # Compare agents_vuln_before_second_scan with state_index_content + # To Do + # check_vuln_state_index(state_index_content_after_second_scan, agents_vuln_before_second_scan) # @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) def test_vulnerability_detector_scans(preconditions, body, teardown, setup, host_manager): + """ + """ # Launch tests tasks launch_parallel_operations(body['tasks'], host_manager) - # Check vulnerability - agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) + # # Check vulnerability + # agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) # Check alert in Wazuh Indexer # monitoring_data = generate_monitoring_alerts_all_agent(host_manager, body['check_alerts']) - expected_alerts = body['check_agent_alert_indexer'] + # expected_alerts = body['check_agent_alert_indexer'] # Check agent System state - - results = monitoring_events(host_manager, monitoring_data) - assert all(results.values()), f"Expected message was not triggered for some agents, {results}" + # To Do + # results = monitoring_events(host_manager, monitoring_data) + # assert all(results.values()), f"Expected message was not triggered for some agents, 
{results}" From 8c524109232f362783b12058670ea8a4460a2b1a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 11:39:25 +0000 Subject: [PATCH 007/174] fix: minnor errors in E2E VD tests --- .../wazuh_testing/end_to_end/configuration.py | 2 + .../wazuh_testing/end_to_end/indexer_api.py | 4 +- .../wazuh_testing/end_to_end/logs.py | 7 +- .../wazuh_testing/end_to_end/monitoring.py | 66 ++++++++--- .../wazuh_testing/end_to_end/regex.py | 2 +- .../end_to_end/remote_operations_handler.py | 78 ++++++++----- .../wazuh_testing/end_to_end/waiters.py | 11 +- .../wazuh_testing/tools/monitoring.py | 3 +- .../wazuh_testing/tools/system.py | 33 ++++-- .../var-ossec-etc-ossec-agent.conf.j2 | 5 + .../test_vulnerability_detector/cases.yaml | 108 +++++++++--------- .../test_vulnerability_detector/conftest.py | 5 +- .../test_vulnerability_detector/test_scans.py | 43 ++----- 13 files changed, 219 insertions(+), 148 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index a1869d8048..c3ecccac80 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -28,6 +28,7 @@ def restore_backup(host_manager, backup_configurations): def configure_environment(host_manager, configurations): def configure_host(host, host_configuration_role): + print(f"Configure {host}") host_os = host_manager.get_host_variables(host)['os_name'] configuration_file_path = configuration_filepath_os[host_os] @@ -39,6 +40,7 @@ def configure_host(host, host_configuration_role): host_configuration = host_configuration_role['agent'] current_configuration = host_manager.get_file_content(str(host), configuration_file_path) + print(current_configuration) new_configuration = set_section_wazuh_conf(host_configuration[0].get('sections'), current_configuration.split("\n")) new_configuration = [line for 
line in new_configuration if line.strip() != ""] diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index f6b0789849..71bc712ea0 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -4,8 +4,8 @@ STATE_INDEX_NAME = 'wazuh-vulnerabilities-states' # Indexer API methods -def get_vuln_state_value(host_manager, credentials={'user': 'wazuh', 'password': 'wazuh'}): - url = f"https://{host_manager.get_master_ip(host_manager)}:9200/{STATE_INDEX_NAME}_search?" +def get_indexer_values(host_manager, credentials={'user': 'admin', 'password': 'changeme'}, index='wazuh-alerts*'): + url = f"https://{host_manager.get_master_ip()}:9200/{index}_search?" query = { "query": { "match_all": {} diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index 716ed86a0a..5067f8acf3 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -12,7 +12,10 @@ def truncate_managers_logs(host_manager): host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) def truncate_logs(host_manager): - # for manager in host_manager.get_group_hosts('manager'): - # host_manager.truncate_file(manager, '/var/ossec/logs/alerts/alerts.json') truncate_managers_logs(host_manager) truncate_agents_logs(host_manager) + + +def truncate_alerts(host_manager): + for manager in host_manager.get_group_hosts('manager'): + host_manager.truncate_file(manager, '/var/ossec/logs/alerts/alerts.json') diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 6f7eb409f5..98fda3e766 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -1,13 +1,16 @@ import os import 
tempfile +import re +from time import sleep from wazuh_testing.end_to_end import logs_filepath_os from wazuh_testing.tools.file import create_temp_file from wazuh_testing.tools.monitoring import HostMonitor from wazuh_testing.end_to_end.regex import get_event_regex +from multiprocessing.pool import ThreadPool -def monitoring_events(host_manager, monitoring_data): +def monitoring_events_host_monitoring(host_manager, monitoring_data): monitoring_file_content = '' results = {} @@ -15,24 +18,60 @@ def monitoring_events(host_manager, monitoring_data): monitoring_file_content += f"{host}:\n" for monitoring_event in data: string_limiter = "'" if '"' in monitoring_event.get("regex", "") else '"' - print(f"String limiter {string_limiter}") monitoring_file_content += f' - regex: {string_limiter}{monitoring_event.get("regex", "")}{string_limiter}\n' - monitoring_file_content += f' path: {string_limiter}{monitoring_event.get("path", "")}{string_limiter}\n' + monitoring_file_content += f' file: {string_limiter}{monitoring_event.get("file", "")}{string_limiter}\n' monitoring_file_content += f' timeout: {monitoring_event.get("timeout", 0)}\n' - temp_file = create_temp_file(monitoring_file_content) - try: + temp_file = create_temp_file(monitoring_file_content) + temporal_directory = tempfile.TemporaryDirectory() - print(temporal_directory.name) - results.update(HostMonitor(inventory_path=host_manager.get_inventory_path(), messages_path=temp_file, tmp_path=temporal_directory.name).run()) - except TimeoutError: - pass - os.remove(temp_file) + HostMonitor(inventory_path=host_manager.get_inventory_path(), + messages_path=temp_file, + tmp_path=temporal_directory.name).run() return results +def monitoring_events_multihost(host_manager, monitoring_data): + def monitoring_event(host_manager, host, monitoring_elements): + """ + Monitor the specified elements on a host. + + Parameters: + - host_manager: An object managing hosts. + - host: The target host. 
+ - monitoring_elements: A list of dictionaries containing regex, timeout, and file. + + Returns: + - The first match found in the file content. + + Raises: + - TimeoutError if no match is found within the specified timeout. + """ + for element in monitoring_elements: + regex, timeout, monitoring_file = element['regex'], element['timeout'], element['file'] + current_timeout = 0 + regex_match = None + while current_timeout < timeout: + # Get file content + print(timeout) + file_content = host_manager.get_file_content(host, monitoring_file) + regex_match = re.search(regex, file_content) + if regex_match: + break + + sleep(5) + current_timeout += 5 + + if not regex_match: + raise TimeoutError("No match found within the specified timeout.") + + with ThreadPool() as pool: + # Use the pool to map the function to the list of hosts + pool.starmap(monitoring_event, [(host_manager, host, data) for host, data in monitoring_data.items()]) + + def generate_monitoring_logs_all_agent(host_manager, regex_list, timeout_list): monitoring_data = {} for agent in host_manager.get_group_hosts('agent'): @@ -41,7 +80,7 @@ def generate_monitoring_logs_all_agent(host_manager, regex_list, timeout_list): os_name = host_manager.get_host_variables(agent)['os_name'] monitoring_data[agent].append({ 'regex': regex_index, - 'path': logs_filepath_os[os_name], + 'file': logs_filepath_os[os_name], 'timeout': timeout_list[index] }) @@ -53,7 +92,7 @@ def generate_monitoring_logs_manager(host_manager, manager, regex, timeout): os_name = host_manager.get_host_variables(manager)['os_name'] monitoring_data[manager] = [{ 'regex': regex, - 'path': logs_filepath_os[os_name], + 'file': logs_filepath_os[os_name], 'timeout': timeout }] @@ -75,10 +114,9 @@ def generate_monitoring_alerts_all_agent(host_manager, events_metadata): event['parameters']['HOST_NAME'] = agent monitoring_element = { 'regex': get_event_regex(event), - 'path': '/var/ossec/logs/alerts/alerts.json', + 'file': 
'/var/ossec/logs/alerts/alerts.json', 'timeout': 120, } - if 'parameters' in metadata_agent: monitoring_element['parameters'] = metadata_agent['parameters'] diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index f25037fdfd..3f6170d386 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -19,7 +19,7 @@ 'parameters': ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] }, 'vulnerability_alert':{ - 'regex': '.*HOST_NAME.*package:.*name":"PACKAGE_NAME".*version":"PACKAGE_VERSION".*"architecture":"ARCHITECTURE.*"cve":"CVE"', + 'regex': '.*HOST_NAME.*package":.*name":"PACKAGE_NAME".*version":"PACKAGE_VERSION".*"architecture":"ARCHITECTURE.*"cve":"CVE"', 'parameters': ['HOST_NAME', 'CVE', 'PACKAGE_NAME', 'PACKAGE_VERSION', 'ARCHITECTURE'] } } diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index f0b953f40e..859900a595 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -1,13 +1,16 @@ from wazuh_testing.end_to_end.regex import get_event_regex -from wazuh_testing.end_to_end.monitoring import monitoring_events +from wazuh_testing.end_to_end.monitoring import monitoring_events_multihost +from wazuh_testing.end_to_end.indexer_api import get_indexer_values, STATE_INDEX_NAME + from multiprocessing.pool import ThreadPool -def launch_remote_operation(host, operation, operation_data, host_manager): +def launch_remote_operation(host, operation_data, host_manager): host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(host)['arch'] - system = host_manager.get_host_variables(host)['os_name'] + operation = operation_data['operation'] + if system == 
'linux': system = host_manager.get_host_variables(host)['os'].split('_')[0] @@ -23,25 +26,35 @@ def launch_remote_operation(host, operation, operation_data, host_manager): host_manager.remove_package(host, package_name, system) elif operation == 'check_agent_vulnerability': + if operation_data['parameters']['alert_indexed']: - check_vuln_indexer(host_manager, operation_data['vulnerability_data']) + check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data']) + if operation_data['parameters']['alert']: check_vuln_alert(host_manager, operation_data['vulnerability_data']) + if operation_data['parameters']['api']: check_vuln_alert_api(host_manager, operation_data['vulnerability_data']) + if operation_data['parameters']['state_indice']: check_vuln_state_index(host_manager, operation_data['vulnerability_data']) def check_vuln_state_index(host_manager, vulnerability_data): - pass + # Check Index values + # Retry 3 times, 10 timestamp + index_vuln_state_content = get_indexer_values(host_manager) + # Process alerts + +def check_vuln_alert_indexer(host_manager, vulnerability_data): + indexer_alerts = get_indexer_values(host_manager, index='wazuh-alerts*') + return indexer_alerts -def check_vuln_indexer(host_manager, vulnerability_data): - pass def check_vuln_alert_api(host_manager, vulnerability_data): pass + def check_vuln_alert(host_manager, vulnerability_data): monitoring_data = {} @@ -52,28 +65,32 @@ def check_vuln_alert(host_manager, vulnerability_data): agent_vulnerability_data_parameters = vulnerability_data[host_os_name][host_os_arch] agent_vulnerability_data_parameters['HOST_NAME'] = agent - agent_vulnerability_data = { - 'event': 'vulnerability_alert', - 'parameters': agent_vulnerability_data_parameters - } + for cve in agent_vulnerability_data_parameters['CVE']: + parameters = agent_vulnerability_data_parameters.copy() + parameters['CVE'] = cve + agent_vulnerability_data = { + 'event': 'vulnerability_alert', + 'parameters': parameters + } - regex = 
get_event_regex(agent_vulnerability_data) + regex = get_event_regex(agent_vulnerability_data) - monitoring_element = { - 'regex': regex, - 'path': '/var/ossec/logs/alerts/alerts.json', - 'timeout': 30, - } + monitoring_element = { + 'regex': regex, + 'file': '/var/ossec/logs/alerts/alerts.json', + 'timeout': 30, + } - if host_manager.get_host_variables(agent)['manager'] not in monitoring_data: - monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] + if host_manager.get_host_variables(agent)['manager'] not in monitoring_data: + monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] - monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) + monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - monitoring_events(host_manager, monitoring_data) + monitoring_events_multihost(host_manager, monitoring_data) def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager): + print(task_list) if task_list: for task in task_list: task_keys = list(task.keys()) @@ -82,9 +99,16 @@ def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager): launch_remote_operation(agent, operation, operation_data, host_manager) -def launch_parallel_operations(task_list, host_manager, group='agent'): - agents = host_manager.get_group_hosts('agent') - parallel_configuration = [(agent, task_list, host_manager) for agent in agents] - with ThreadPool() as pool: - # Use the pool to map the function to the list of hosts - pool.starmap(launch_remote_sequential_operation_on_agent, parallel_configuration) +def launch_parallel_operations(task_list, host_manager): + print("Launch parallel operations") + for task in task_list: + parallel_configuration = [] + target = task['target'] + + for host in host_manager.get_group_hosts(target): + print(f"Append {host} {task_list} {host_manager}") + parallel_configuration.append((host, task, host_manager)) + + with 
ThreadPool() as pool: + # Use the pool to map the function to the list of hosts + pool.starmap(launch_remote_operation, parallel_configuration) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index c56ccbc410..b2b47beea8 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -1,19 +1,18 @@ -from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_manager, monitoring_events +from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_manager, monitoring_events_multihost from wazuh_testing.end_to_end.wazuh_api import get_agents_id - def wait_until_vd_is_updated(host_manager): monitoring_data = {} for manager in host_manager.get_group_hosts('manager'): - monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Starting vulnerability scan', 600) + monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Starting vulnerability scan', 800) - monitoring_events(host_manager, monitoring_data) + monitoring_events_multihost(host_manager, monitoring_data) def wait_until_vuln_scan_agents_finished(host_manager): for agent in host_manager.get_group_hosts('agent'): manager_host = host_manager.get_host_variables(agent)['manager'] agents_id = get_agents_id(host_manager) - monitoring_data = generate_monitoring_logs_manager(host_manager, manager_host,rf"Finished vulnerability assessment for agent '{agents_id[agent]}'", 30) - monitoring_events(host_manager, monitoring_data) + monitoring_data = generate_monitoring_logs_manager(host_manager, manager_host, rf"Finished vulnerability assessment for agent '{agents_id[agent]}'", 700) + monitoring_events_multihost(host_manager, monitoring_data) diff --git a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py index 5a4de5086a..30342aa3e1 100644 --- 
a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py @@ -432,6 +432,7 @@ def get_results(self, callback=_callback_default, accum_results=1, timeout=-1, u else: msg = self._queue.peek(position=position, block=True, timeout=self._time_step) position += 1 + print(f"Monitoring line {msg}") item = callback(msg) logging.debug(msg) if item is not None and item: @@ -936,7 +937,7 @@ def run(self, update_position=False): for file_collector in self._file_content_collectors: file_collector.terminate() file_collector.join() - self.clean_tmp_files() + # self.clean_tmp_files() break time.sleep(self._time_step) self.check_result() diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 18b2a11bf1..4fd41e0e74 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -107,7 +107,12 @@ def get_host(self, host: str): return testinfra.get_host(f"ansible://{host}?ansible_inventory={self.inventory_path}") def truncate_file(self, host: str, filepath: str): - self.get_host(host).ansible("command", f"truncate -s 0 {filepath}", check=False) + ansible_command = 'file' + if 'os_name' in self.get_host_variables(host): + host_os_name = self.get_host_variables(host)['os_name'] + ansible_command = 'win_copy' if self.get_host_variables(host)['os_name'] == 'windows' else 'copy' + + result = self.get_host(host).ansible(ansible_command, f"dest='{filepath}' content=''", check=False) def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/ossec.conf', check: bool = False): @@ -206,14 +211,26 @@ def get_file_content(self, host: str, file_path: str): host (str): Hostname file_path (str) : Path of the file """ - # return self.get_host(host).file(file_path).content_string - testinfra_host = self.get_host(host) - result = testinfra_host.ansible("slurp", f"src='{file_path}'", check=False) - if 'content' 
not in result: - raise Exception(f"No content value in {result}") + ansible_method = 'command' + command = 'cat' + if 'os_name' in self.get_host_variables(host) and self.get_host_variables(host)['os_name'] == 'windows': + ansible_method = 'win_shell' + command = 'type' + + result = self.get_host(host).ansible(ansible_method, f"{command} '{file_path}'", check=False) - return base64.b64decode(result['content']).decode('utf-8') + return result['stdout'] + + + # testinfra_host = self.get_host(host) + # result = testinfra_host.ansible("slurp", f"src='{file_path}'", check=False) + # print(result) + # if 'content' not in result: + # raise Exception(f"No content value in {result}") + + # decoded = base64.b64decode(result['content']).decode('utf-8') + # return decoded def apply_config(self, config_yml_path: str, dest_path: str = WAZUH_CONF, clear_files: list = None, @@ -436,10 +453,12 @@ def install_package(self, host, url, system='ubuntu'): a = self.get_host(host).ansible("apt", f"deb={url}", check=False) if a['changed'] == True and a['stderr'] == '': result = True + print(a) elif system == 'centos': a = self.get_host(host).ansible("yum", f"name={url} state=present sslverify=false disable_gpg_check=True", check=False) if 'rc' in a and a['rc'] == 0 and a['changed'] == True: result = True + print(a) def get_master_ip(self): """ diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 index 165d8e64b1..350d4fedd8 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 @@ -77,6 +77,11 @@ {{ wazuh_agent_config.enrollment.use_source_ip }} {% endif %} + {% else %} + + {{ ansible_hostname }} + + {% endif %} diff --git a/tests/end_to_end/test_vulnerability_detector/cases.yaml 
b/tests/end_to_end/test_vulnerability_detector/cases.yaml index 7015054ef4..d98c5ebd17 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases.yaml @@ -4,59 +4,61 @@ preconditions: null body: tasks: - - install_package: - package: - centos: - amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm - arm: https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.17-2PGDG.rhel7.aarch64.rpm - ubuntu: - amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb - arm: https://grafana.com/grafana/download/8.5.5?edition=enterprise&platform=arm - windows: - amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe - macos: - amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - - check_agent_vulnerability: - parameters: - alert_indexed: False - api: False - alert: True - state_indice: False - vulnerability_data: - centos: - amd64: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.49.5" - CVE: CVE-2020-28924 - ARCHITECTURE: 'x86_64' - arm: - PACKAGE_NAME: "postgresql11-libs" - PACKAGE_VERSION: "11.17.2" - CVE: CVE-2020-28924 - ARCHITECTURE: 'arm64' - ubuntu: - amd64: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.49.5" - CVE: CVE-2020-28924 - ARCHITECTURE: 'x86_64' - arm: - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2020-28924 - ARCHITECTURE: 'arm64' - windows: - amd64: - PACKAGE_NAME: "vlc" - PACKAGE_VERSION: "3.0.6" - CVE: CVE-2020-28924 - ARCHITECTURE: 'x86_64' - macos: - amd64: - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: CVE-2020-28924 - ARCHITECTURE: 'x86_64' + - operation: install_package + target: agent + package: + centos: + amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm + arm: https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.17-2PGDG.rhel7.aarch64.rpm + ubuntu: + amd64: 
https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb + arm: https://grafana.com/grafana/download/8.5.5?edition=enterprise&platform=arm + windows: + amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe + macos: + amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg + - operation: check_agent_vulnerability + target: manager + parameters: + alert: True + alert_indexed: False + api: False + state_indice: False + vulnerability_data: + centos: + amd64: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.49.5-1" + CVE: ["CVE-2020-28924"] + ARCHITECTURE: 'x86_64' + arm: + PACKAGE_NAME: "postgresql11-libs" + PACKAGE_VERSION: "11.17.2" + CVE: ["CVE-2020-28924"] + ARCHITECTURE: 'arm64' + ubuntu: + amd64: + PACKAGE_NAME: "rclone" + PACKAGE_VERSION: "1.49.5-1" + CVE: ["CVE-2020-28924"] + ARCHITECTURE: 'x86_64' + arm: + PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5-1" + CVE: ["CVE-2020-28924"] + ARCHITECTURE: 'arm64' + windows: + amd64: + PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.6-1" + CVE: ["CVE-2020-28924"] + ARCHITECTURE: 'x86_64' + macos: + amd64: + PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.0.1" + CVE: ["CVE-2020-28924"] + ARCHITECTURE: 'x86_64' # check_alerts: # centos: diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index 0d9d40c9f9..db0368f310 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -45,12 +45,9 @@ def setup(preconditions, teardown, host_manager): # monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) # monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - # monitoring_events(host_manager, monitoring_data) + # monitoring_events_multihost(host_manager, monitoring_data) yield if teardown: launch_parallel_operations(teardown, host_manager) - - for host in 
host_manager.get_group_hosts('manager'): - host_manager.truncate_file(host, '/var/ossec/logs/alerts/alerts.json') \ No newline at end of file diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index b823a97f93..c752fb7e26 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -1,35 +1,20 @@ -import argparse -import base64 import os -import re -import subprocess -import tempfile -from multiprocessing.pool import ThreadPool - import pytest -import ansible_runner import yaml -from ansible.inventory.manager import InventoryManager -from ansible.parsing.dataloader import DataLoader - -from wazuh_testing.tools.configuration import ( - load_configuration_template, set_section_wazuh_conf -) -from wazuh_testing.tools.monitoring import HostMonitor -from wazuh_testing.tools.system import HostManager -from wazuh_testing.api import make_api_call, get_token_login_api -from wazuh_testing.end_to_end import get_alert_indexer_api +from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.end_to_end.configuration import backup_configurations, restore_backup, configure_environment from wazuh_testing.end_to_end.services import control_environment from wazuh_testing.end_to_end.logs import truncate_agents_logs, truncate_managers_logs, truncate_logs -from wazuh_testing.end_to_end.wazuh_api import get_agents_id, get_agents_vulnerabilities -from wazuh_testing.end_to_end.indexer_api import get_vuln_state_value +from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated, wait_until_vuln_scan_agents_finished -from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events +from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, 
monitoring_events_multihost from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations + +TIMEOUT_SYSCOLLECTOR_SCAN = 120 + current_dir = os.path.dirname(__file__) configurations_dir = os.path.join(current_dir, "data", "configurations") cases = {} @@ -102,11 +87,9 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): monitoring_data = generate_monitoring_logs_all_agent(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], - [20, 20]) - - results = monitoring_events(host_manager, monitoring_data) + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) - assert all(results.values()), f"First Syscollector scan was not detected in some of the agents {results}" + monitoring_events_multihost(host_manager, monitoring_data) # Truncate agents logs to detect second scan truncate_agents_logs(host_manager) @@ -134,11 +117,9 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): monitoring_data = generate_monitoring_logs_all_agent(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], - [60, 60]) - - results = monitoring_events(host_manager, monitoring_data) + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) - assert all(results.values()), f"Second Syscollector scan was not detected in some of the agents {results}" + monitoring_events_multihost(host_manager, monitoring_data) # WARNING # Is possible that second scan will not produce expected Finished Scan in the agent. 
@@ -157,7 +138,7 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # check_vuln_state_index(state_index_content_after_second_scan, agents_vuln_before_second_scan) -# @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) +# @pytest.mark.dependency(depends=["test_syscollector_initial_scans"]) @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) def test_vulnerability_detector_scans(preconditions, body, teardown, setup, host_manager): """ @@ -174,5 +155,5 @@ def test_vulnerability_detector_scans(preconditions, body, teardown, setup, host # Check agent System state # To Do - # results = monitoring_events(host_manager, monitoring_data) + # results = monitoring_events_multihost(host_manager, monitoring_data) # assert all(results.values()), f"Expected message was not triggered for some agents, {results}" From b6406e16d7f9a8f74afc51fb4c7ec726bd15a91f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 18:35:00 +0000 Subject: [PATCH 008/174] refac: improve configuration module for E2E --- .../wazuh_testing/end_to_end/cases_handler.py | 0 .../wazuh_testing/end_to_end/configuration.py | 117 ++++++++++++------ 2 files changed, 79 insertions(+), 38 deletions(-) delete mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/cases_handler.py diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/cases_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/cases_handler.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index c3ecccac80..360f37a6eb 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -1,57 +1,98 @@ +""" +Module Name: configuration.py + +Description: + This module provides functions for configuring and 
managing host configurations using the HostManager class + and related tools. + +Functions: + 1. backup_configurations(host_manager: HostManager) -> dict: + Backup configurations for all hosts in the specified host manager. + + 2. restore_backup(host_manager: HostManager, backup_configurations: dict) -> None: + Restore configurations for all hosts in the specified host manager. + + 3. configure_environment(host_manager: HostManager, configurations: dict) -> None: + Configure the environment for all hosts in the specified host manager. + This function uses ThreadPool to parallelize the configuration process. + +Module Usage: + This module can be used to manage configurations for a distributed system, with functions for + backup, restore, and parallelized environment configuration. +""" from multiprocessing.pool import ThreadPool import xml.dom.minidom -from ansible.parsing.dataloader import DataLoader from wazuh_testing.end_to_end import configuration_filepath_os from wazuh_testing.tools.configuration import set_section_wazuh_conf +from wazuh_testing.tools.system import HostManager + -# Configuration methods -def backup_configurations(host_manager): - backup_configurations = {} - for host in host_manager.get_group_hosts('all'): - host_variables = host_manager.get_host_variables(host) - host_os = host_variables['os_name'] - configuration_file_path = configuration_filepath_os[host_os] - current_configuration = host_manager.get_file_content(str(host), configuration_file_path) - backup_configurations[str(host)] = current_configuration - return backup_configurations +def backup_configurations(host_manager: HostManager) -> dict: + """ + Backup configurations for all hosts in the specified host manager. + Args: + host_manager: An instance of the HostManager class containing information about hosts. 
-def restore_backup(host_manager, backup_configurations): - for host in host_manager.get_group_hosts('all'): - host_variables = host_manager.get_host_variables(host) - host_os = host_variables['os_name'] - configuration_file_path = configuration_filepath_os[host_os] - host_manager.modify_file_content(str(host), configuration_file_path, backup_configurations[str(host)]) + Returns: + dict: A dictionary mapping host names to their configurations. + """ + return { + str(host): host_manager.get_file_content(str(host), configuration_filepath_os[host_manager.get_host_variables(host)['os_name']]) + for host in host_manager.get_group_hosts('all') + } -def configure_environment(host_manager, configurations): - def configure_host(host, host_configuration_role): - print(f"Configure {host}") - host_os = host_manager.get_host_variables(host)['os_name'] - configuration_file_path = configuration_filepath_os[host_os] +def restore_backup(host_manager: HostManager, backup_configurations: dict) -> None: + """ + Restore configurations for all hosts in the specified host manager. - host_groups = host_manager.get_host_groups(host) - host_configuration = None - if 'manager' in host_groups: - host_configuration = host_configuration_role['manager'] - elif 'agent' in host_groups: - host_configuration = host_configuration_role['agent'] + Args: + host_manager: An instance of the HostManager class containing information about hosts. + backup_configurations: A dictionary mapping host names to their configurations. 
+ """ + [host_manager.modify_file_content(str(host), configuration_filepath_os[host_manager.get_host_variables(host)['os_name']], backup_configurations[str(host)]) + for host in host_manager.get_group_hosts('all')] - current_configuration = host_manager.get_file_content(str(host), configuration_file_path) - print(current_configuration) - new_configuration = set_section_wazuh_conf(host_configuration[0].get('sections'), current_configuration.split("\n")) - new_configuration = [line for line in new_configuration if line.strip() != ""] - dom = xml.dom.minidom.parseString(''.join(new_configuration)) - new_configuration = "\n".join(dom.toprettyxml().split("\n")[1:]) +def configure_host(host: str, host_configuration_role: dict, host_manager: HostManager) -> None: + """ + Configure a specific host. - host_manager.modify_file_content(str(host), configuration_file_path, new_configuration) + Args: + host: The name of the host to be configured. + host_configuration_role: Role of the configured host for the host. + host_manager: An instance of the HostManager class containing information about hosts. 
+ """ + host_os = host_manager.get_host_variables(host)['os_name'] + config_file_path = configuration_filepath_os[host_os] - loader = DataLoader() - configure_environment_parallel_map = [ (host, configurations) for host in host_manager.get_group_hosts('all')] + host_groups = host_manager.get_host_groups(host) + host_config = host_configuration_role.get('manager' if 'manager' in host_groups else 'agent', None) + + if not host_config: + raise TypeError(f"Host {host} configuration does not include a valid role (manager or agent): {host_configuration_role}") + + current_config = host_manager.get_file_content(str(host), config_file_path) + new_config = set_section_wazuh_conf(host_config[0].get('sections'), current_config.split("\n")) + new_config = "\n".join(xml.dom.minidom.parseString(''.join(new_config)).toprettyxml().split("\n")[1:]) + + host_manager.modify_file_content(str(host), config_file_path, new_config) + + +def configure_environment(host_manager: HostManager, configurations: dict) -> None: + """ + Configure the environment for all hosts in the specified host manager. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + configurations: A dictionary mapping host roles to their configuration details. 
+ """ + configure_environment_parallel_map = [(host, configurations) for host in host_manager.get_group_hosts('all')] with ThreadPool() as pool: - pool.starmap(configure_host, configure_environment_parallel_map) + pool.starmap(configure_host, [(host, config, host_manager) for host, config in configure_environment_parallel_map]) + From e0b6bd7bec428f8eb819f8d1856e51266ae235b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 18:49:53 +0000 Subject: [PATCH 009/174] refact: improve indexer_api module --- .../wazuh_testing/end_to_end/indexer_api.py | 46 ++++++++++++++++++- 1 file changed, 44 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 71bc712ea0..c4e2fd8036 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -1,10 +1,51 @@ +""" +Module Name: indexer_api + +Description: + This module provides functions for interacting with the Wazuh Elasticsearch indexer API, + specifically for retrieving vulnerability state information. + +Constants: + - STATE_INDEX_NAME: The name of the index used for storing Wazuh vulnerabilities states. + +Functions: + 1. get_indexer_values(host_manager, credentials={'user': 'admin', 'password': 'changeme'}, index='wazuh-alerts*') -> str: + Get values from the Wazuh Elasticsearch indexer API. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + credentials (Optional): A dictionary containing the Elasticsearch credentials. Defaults to + {'user': 'admin', 'password': 'changeme'}. + index (Optional): The Elasticsearch index name. Defaults to 'wazuh-alerts*'. + + Returns: + str: The response text from the indexer API. 
+ +Module Usage: + This module can be used to retrieve information from the Wazuh Elasticsearch indexer API, specifically for + vulnerability states. +""" import requests +from wazuh_testing.tools.system import HostManager + STATE_INDEX_NAME = 'wazuh-vulnerabilities-states' -# Indexer API methods -def get_indexer_values(host_manager, credentials={'user': 'admin', 'password': 'changeme'}, index='wazuh-alerts*'): + +def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, index: str = 'wazuh-alerts*') -> str: + """ + Get values from the Wazuh Elasticsearch indexer API. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + credentials (Optional): A dictionary containing the Elasticsearch credentials. Defaults to + {'user': 'admin', 'password': 'changeme'}. + index (Optional): The Elasticsearch index name. Defaults to 'wazuh-alerts*'. + + Returns: + str: The response text from the indexer API. + """ url = f"https://{host_manager.get_master_ip()}:9200/{index}_search?" 
query = { "query": { @@ -14,3 +55,4 @@ def get_indexer_values(host_manager, credentials={'user': 'admin', 'password': ' response = requests.get(url=url, params={'pretty': 'true'}, json=query, verify=False, auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password'])) return response.text + From f3b46173179a1951bd13c7a2982858842a4701b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 18:50:26 +0000 Subject: [PATCH 010/174] docs: include docstrings to logs E2E module --- .../wazuh_testing/end_to_end/logs.py | 74 +++++++++++++++++-- 1 file changed, 67 insertions(+), 7 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index 5067f8acf3..690ff26aa8 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -1,21 +1,81 @@ +""" +Module Name: logs + +Description: + This module provides functions for truncating logs and alerts for Wazuh agents and managers. + +Functions: + 1. truncate_agents_logs(host_manager: HostManager) -> None: + Truncate logs for Wazuh agents. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + + 2. truncate_managers_logs(host_manager: HostManager) -> None: + Truncate logs for Wazuh managers. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + + 3. truncate_logs(host_manager: HostManager) -> None: + Truncate logs for both Wazuh agents and managers. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + + 4. truncate_alerts(host_manager: HostManager) -> None: + Truncate Wazuh alerts. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + +Module Usage: + This module can be used to truncate logs and alerts for Wazuh agents and managers. 
+""" + from wazuh_testing.end_to_end import logs_filepath_os +from wazuh_testing.tools.system import HostManager + +def truncate_agents_logs(host_manager: HostManager) -> None: + """ + Truncate logs for Wazuh agents. -def truncate_agents_logs(host_manager): + Args: + host_manager: An instance of the HostManager class containing information about hosts. + """ for agent in host_manager.get_group_hosts('agent'): host_os_name = host_manager.get_host_variables(agent)['os_name'] host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) -def truncate_managers_logs(host_manager): - for agent in host_manager.get_group_hosts('manager'): - host_os_name = host_manager.get_host_variables(agent)['os_name'] - host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) +def truncate_managers_logs(host_manager: HostManager) -> None: + """ + Truncate logs for Wazuh managers. -def truncate_logs(host_manager): + Args: + host_manager: An instance of the HostManager class containing information about hosts. + """ + for manager in host_manager.get_group_hosts('manager'): + host_os_name = host_manager.get_host_variables(manager)['os_name'] + host_manager.truncate_file(manager, logs_filepath_os[host_os_name]) + +def truncate_logs(host_manager: HostManager) -> None: + """ + Truncate logs for both Wazuh agents and managers. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + """ truncate_managers_logs(host_manager) truncate_agents_logs(host_manager) +def truncate_alerts(host_manager: HostManager) -> None: + """ + Truncate Wazuh alerts. -def truncate_alerts(host_manager): + Args: + host_manager: An instance of the HostManager class containing information about hosts. 
+ """ for manager in host_manager.get_group_hosts('manager'): host_manager.truncate_file(manager, '/var/ossec/logs/alerts/alerts.json') + From 152baf37d3a2a3e1e48001e0f758f2de04b4efa7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 18:51:20 +0000 Subject: [PATCH 011/174] docs: include docstrings for monitoring module --- .../wazuh_testing/end_to_end/monitoring.py | 110 +++++++++++++++--- 1 file changed, 92 insertions(+), 18 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 98fda3e766..bc1f5a743f 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -1,16 +1,56 @@ -import os +""" +Module Name: wazuh_monitoring + +Description: + This module provides functions for monitoring events, files, and alerts in a Wazuh environment. + +Imports: + - os + - tempfile + - re + - sleep + - requests + - ThreadPool + +Functions: + 1. monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data: dict) -> dict: + Monitor events on hosts using the HostMonitor. + + 2. monitoring_events_multihost(host_manager: HostManager, monitoring_data: dict) -> None: + Monitor events on multiple hosts concurrently. + + 3. generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: list, timeout_list: list) -> dict: + Generate monitoring data for logs on all agent hosts. + + 4. generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int) -> dict: + Generate monitoring data for logs on a specific manager host. + + 5. generate_monitoring_alerts_all_agent(host_manager: HostManager, events_metadata: dict) -> dict: + Generate monitoring data for alerts on all agent hosts. 
+""" + import tempfile import re from time import sleep - +from multiprocessing.pool import ThreadPool from wazuh_testing.end_to_end import logs_filepath_os from wazuh_testing.tools.file import create_temp_file from wazuh_testing.tools.monitoring import HostMonitor from wazuh_testing.end_to_end.regex import get_event_regex -from multiprocessing.pool import ThreadPool +from wazuh_testing.tools.system import HostManager -def monitoring_events_host_monitoring(host_manager, monitoring_data): +def monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data: dict) -> dict: + """ + Monitor events on hosts using the HostMonitor class. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + monitoring_data: A dictionary containing monitoring data for each host. + + Returns: + dict: Results of the monitoring process. + """ monitoring_file_content = '' results = {} @@ -33,19 +73,23 @@ def monitoring_events_host_monitoring(host_manager, monitoring_data): return results -def monitoring_events_multihost(host_manager, monitoring_data): +def monitoring_events_multihost(host_manager: HostManager, monitoring_data: dict) -> None: + """ + Monitor events on multiple hosts concurrently. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + monitoring_data: A dictionary containing monitoring data for each host. + """ def monitoring_event(host_manager, host, monitoring_elements): """ Monitor the specified elements on a host. - Parameters: - host_manager: An object managing hosts. - host: The target host. - monitoring_elements: A list of dictionaries containing regex, timeout, and file. - Returns: - The first match found in the file content. - Raises: - TimeoutError if no match is found within the specified timeout. 
""" @@ -54,8 +98,6 @@ def monitoring_event(host_manager, host, monitoring_elements): current_timeout = 0 regex_match = None while current_timeout < timeout: - # Get file content - print(timeout) file_content = host_manager.get_file_content(host, monitoring_file) regex_match = re.search(regex, file_content) if regex_match: @@ -72,7 +114,18 @@ def monitoring_event(host_manager, host, monitoring_elements): pool.starmap(monitoring_event, [(host_manager, host, data) for host, data in monitoring_data.items()]) -def generate_monitoring_logs_all_agent(host_manager, regex_list, timeout_list): +def generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: list, timeout_list: list) -> dict: + """ + Generate monitoring data for logs on all agent hosts. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + regex_list: A list of regular expressions for monitoring. + timeout_list: A list of timeout values for monitoring. + + Returns: + dict: Monitoring data for logs on all agent hosts. + """ monitoring_data = {} for agent in host_manager.get_group_hosts('agent'): monitoring_data[agent] = [] @@ -82,25 +135,44 @@ def generate_monitoring_logs_all_agent(host_manager, regex_list, timeout_list): 'regex': regex_index, 'file': logs_filepath_os[os_name], 'timeout': timeout_list[index] - }) return monitoring_data -def generate_monitoring_logs_manager(host_manager, manager, regex, timeout): +def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int) -> dict: + """ + Generate monitoring data for logs on a specific manager host. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + manager: The target manager host. + regex: The regular expression for monitoring. + timeout: The timeout value for monitoring. + + Returns: + dict: Monitoring data for logs on the specified manager host. 
+ """ monitoring_data = {} os_name = host_manager.get_host_variables(manager)['os_name'] monitoring_data[manager] = [{ 'regex': regex, 'file': logs_filepath_os[os_name], 'timeout': timeout - }] - return monitoring_data -def generate_monitoring_alerts_all_agent(host_manager, events_metadata): +def generate_monitoring_alerts_all_agent(host_manager: HostManager, events_metadata: dict) -> dict: + """ + Generate monitoring data for alerts on all agent hosts. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + events_metadata: Metadata containing information about events. + + Returns: + dict: Monitoring data for alerts on all agent hosts. + """ monitoring_data = {} for agent in host_manager.get_group_hosts('agent'): @@ -110,7 +182,7 @@ def generate_monitoring_alerts_all_agent(host_manager, events_metadata): if not host_manager.get_host_variables(agent)['manager'] in monitoring_data: monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] - for event in metadata_agent[agent.get_host_variables(agent)['arch']]: + for event in metadata_agent[host_manager.get_host_variables(agent)['arch']]: event['parameters']['HOST_NAME'] = agent monitoring_element = { 'regex': get_event_regex(event), @@ -120,4 +192,6 @@ def generate_monitoring_alerts_all_agent(host_manager, events_metadata): if 'parameters' in metadata_agent: monitoring_element['parameters'] = metadata_agent['parameters'] - monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) \ No newline at end of file + monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) + + return monitoring_data From 2c7ea3f1309f763f582b32272b122e2133f81bb3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 18:52:12 +0000 Subject: [PATCH 012/174] fix: replace docstring placeholders --- .../wazuh_testing/end_to_end/wazuh_api.py | 22 ------------------- 1 file 
changed, 22 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py index 8cc29ea6c4..19635f4410 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py @@ -13,28 +13,6 @@ Parameters: - host_manager (HostManager): An instance of the HostManager class representing the Wazuh environment. - -Usage Example: -```python -from host_manager_module import HostManager -from wazuh_api_module import get_api_parameters, get_api_token, get_agents_id, get_agents_vulnerabilities - -# Create an instance of HostManager -host_manager = HostManager() - -# Retrieve API parameters -api_params = get_api_parameters(host_manager) - -# Obtain API token -api_token = get_api_token(host_manager) - -# Retrieve Wazuh agents IDs -agents_ids = get_agents_id(host_manager) - -# Retrieve vulnerability information for agents -agents_vuln = get_agents_vulnerabilities(host_manager) - -Note: Make sure to replace 'host_manager_module' and 'wazuh_api_module' with the actual module names in your project. 
""" from wazuh_testing.api import make_api_call, get_token_login_api From 0ea1237781a219e462f4ffdc6518f1811c55e464 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 18:53:50 +0000 Subject: [PATCH 013/174] feat: remove non-basic VD tests --- .../test_vulnerability_detector/cases.yaml | 301 ------------------ .../test_vulnerability_detector/test_scans.py | 21 -- 2 files changed, 322 deletions(-) delete mode 100644 tests/end_to_end/test_vulnerability_detector/cases.yaml diff --git a/tests/end_to_end/test_vulnerability_detector/cases.yaml b/tests/end_to_end/test_vulnerability_detector/cases.yaml deleted file mode 100644 index d98c5ebd17..0000000000 --- a/tests/end_to_end/test_vulnerability_detector/cases.yaml +++ /dev/null @@ -1,301 +0,0 @@ -- case: "Installation of a vulnerable package" - id: "install_package" - description: "Installation of a vulnerable package" - preconditions: null - body: - tasks: - - operation: install_package - target: agent - package: - centos: - amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm - arm: https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.17-2PGDG.rhel7.aarch64.rpm - ubuntu: - amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb - arm: https://grafana.com/grafana/download/8.5.5?edition=enterprise&platform=arm - windows: - amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe - macos: - amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - - operation: check_agent_vulnerability - target: manager - parameters: - alert: True - alert_indexed: False - api: False - state_indice: False - vulnerability_data: - centos: - amd64: - PACKAGE_NAME: "rclone" - PACKAGE_VERSION: "1.49.5-1" - CVE: ["CVE-2020-28924"] - ARCHITECTURE: 'x86_64' - arm: - PACKAGE_NAME: "postgresql11-libs" - PACKAGE_VERSION: "11.17.2" - CVE: ["CVE-2020-28924"] - ARCHITECTURE: 'arm64' - ubuntu: - amd64: - PACKAGE_NAME: 
"rclone" - PACKAGE_VERSION: "1.49.5-1" - CVE: ["CVE-2020-28924"] - ARCHITECTURE: 'x86_64' - arm: - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5-1" - CVE: ["CVE-2020-28924"] - ARCHITECTURE: 'arm64' - windows: - amd64: - PACKAGE_NAME: "vlc" - PACKAGE_VERSION: "3.0.6-1" - CVE: ["CVE-2020-28924"] - ARCHITECTURE: 'x86_64' - macos: - amd64: - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: ["CVE-2020-28924"] - ARCHITECTURE: 'x86_64' - - # check_alerts: - # centos: - # amd64: - # - event: syscollector_install_package_alert_yum - # parameters: - # PACKAGE_NAME: "rclone" - # PACKAGE_VERSION: "1.49.5" - # arm: - # - event: syscollector_install_package_alert_yum - # parameters: - # PACKAGE_NAME: "rclone" - # PACKAGE_VERSION: "1.49.5" - # ubuntu: - # amd64: - # - event: syscollector_install_package_alert_apt - # parameters: - # PACKAGE_NAME: "rclone" - # PACKAGE_VERSION: "1.49.5" - # arm: - # - event: syscollector_install_package_alert_apt - # parameters: - # PACKAGE_NAME: "rclone" - # PACKAGE_VERSION: "1.49.5" - # windows: - # amd64: - # - event: syscollector_install_package_alert_apt - # parameters: - # PACKAGE_NAME: "VideoLAN" - # PACKAGE_VERSION: "3.0.6" - # macos: - # amd64: - # - event: syscollector_install_package_alert_apt - # parameters: - # PACKAGE_NAME: - # PACKAGE_VERSION: - # teardown: - # - remove_package: - # package: - # centos: - # amd: rclone - # arm: rclone - # ubuntu: - # amd: rclone - # arm: rclone - # windows: - # amd64: C:\\\\Program Files\\\\VideoLAN\\\\VLC\\\\uninstall.exe - # macos: - # amd64: - -# - case: "Updating a vulnerable package that remains vulnerable to the same CVE" -# id: "update_vuln_package_remain_vulnerable" -# description: "Updating a vulnerable package that remains vulnerable to the same CVE" -# preconditions: -# tasks: -# - install_package: -# package: -# centos: -# amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm -# ubuntu: -# amd64: 
https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb -# check_alerts: -# centos: -# - event: syscollector_install_package_alert_yum -# parameters: -# PACKAGE_NAME: "rclone" -# PACKAGE_VERSION: "1.49.5" -# ubuntu: -# - event: syscollector_install_package_alert_apt -# parameters: -# PACKAGE_NAME: "rclone" -# PACKAGE_VERSION: "1.49.5" -# body: -# tasks: -# - install_package: -# package: -# centos: -# amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.rpm -# ubuntu: -# amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.deb -# check_alerts: -# centos: -# - event: syscollector_upgrade_package_alert_yum -# parameters: -# PACKAGE_NAME: "rclone" -# PACKAGE_VERSION: "1.50.0" -# ubuntu: -# - event: syscollector_install_package_alert_apt -# parameters: -# PACKAGE_NAME: "rclone" -# PACKAGE_VERSION: "1.50.0" -# teardown: -# - remove_package: -# package: -# centos: rclone -# ubuntu: rclone - -# ---- - -# - case: "Updating a vulnerable package that becomes vulnerable to another CVE" -# id: "update_vuln_package_additional_vulnerablility" -# description: "Updating a vulnerable package that becomes vulnerable to another CVE" -# preconditions: -# tasks: -# - install_package: -# package: -# centos: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm -# ubuntu: https://sourceforge.net/projects/webadmin/files/webmin/1.999/webmin_1.999_all.deb/download -# check_alerts: -# centos: -# - event: install_package -# name: "nmap" -# version: "6.47" -# ubuntu: -# - event: install_package -# name: "webmin" -# version: "1.999" -# body: -# tasks: -# - install_package: -# package: -# centos: https://nmap.org/dist/nmap-7.00-1.x86_64.rpm -# ubuntu: https://sourceforge.net/projects/webadmin/files/webmin/1.999/webmin_2.000_all.deb/download -# check_alerts: -# centos: -# - event: upgrade_package -# name: "nmap" -# version: "7.00" -# ubuntu: -# - event: upgrade_package -# name: "webmin" -# version: "2.000" -# teardown: -# - remove_package: -# package: -# 
centos: nmap -# ubuntu: webmin - - -# ------------------------ - -# - case: "Vulnerable package that update is also vulnerable to another CVE" -# id: "update_vuln_package_additional_vulnerablility" -# description: "Vulnerable package that update is also vulnerable to another CVE" -# preconditions: -# tasks: -# - install_package: -# package: -# centos: https://repo.mongodb.org/yum/redhat/7/mongodb-org/4.2/x86_64/RPMS/mongodb-org-server-4.2.11-1.el7.x86_64.rpm -# ubuntu: https://repo.mongodb.org/apt/ubuntu/dists/bionic/mongodb-org/4.2/multiverse/binary-amd64/mongodb-org-server_4.2.11_amd64.deb -# check_alerts: -# centos: -# - event: install_package -# name: "mongodb" -# version: "4.2.11" -# ubuntu: -# - event: install_package -# name: "mongodb" -# version: "4.2.11" -# body: -# tasks: -# - install_package: -# package: -# centos: https://repo.mongodb.org/yum/redhat/7/mongodb-org/4.4/x86_64/RPMS/mongodb-org-server-4.4.10-1.el7.x86_64.rpm -# ubuntu: https://repo.mongodb.org/apt/ubuntu/dists/focal/mongodb-org/4.4/multiverse/binary-amd64/mongodb-org-server_4.4.10_amd64.deb -# check_alerts: -# centos: -# - event: upgrade_package -# name: "mongodb" -# version: "4.4.10" -# ubuntu: -# - event: upgrade_package -# name: "mongodb" -# version: "4.4.10" -# teardown: -# - remove_package: -# package: -# centos: mongodb -# ubuntu: mongodb - - -# - case: "Vulnerable package that update cease to be vulnerable" -# id: "update_vuln_package_cease_vuln" -# description: "Vulnerable package that update cease to be vulnerable" -# preconditions: -# tasks: -# - install_package: -# package: -# centos: https://master.dl.sourceforge.net/project/webadmin/webmin/2.003/webmin-2.003-1.noarch.rpm -# ubuntu: https://master.dl.sourceforge.net/project/webadmin/webmin/2.003/webmin_2.003_all.deb -# check_alerts: -# centos: -# - event: install_package -# name: "webmin" -# version: "2.003" -# ubuntu: -# - event: install_package -# name: "webmin" -# version: "2.003" -# body: -# tasks: -# - install_package: -# 
package: -# centos: https://kumisystems.dl.sourceforge.net/project/webadmin/webmin/2.000/webmin-2.000-1.noarch.rpm -# ubuntu: https://altushost-swe.dl.sourceforge.net/project/webadmin/webmin/2.000/webmin_2.000_all.deb -# check_alerts: -# centos: -# - event: upgrade_package -# name: "webmin" -# version: "2.000" -# ubuntu: -# - event: upgrade_package -# name: "webmin" -# version: "2.000" -# teardown: -# - remove_package: -# package: -# centos: webmin -# ubuntu: webmin - - - - -# "Deleting a vulnerable package" - - - -# - case: "Installation of a non-vulnerable package" - - - -# - case: "Updating a non-vulnerable package that becomes vulnerable" - - - -# Updating of a non-vulnerable package that remains non-vulnerable - - - -# Deletion of a non-vulnerable package diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index c752fb7e26..1313e7bd50 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -136,24 +136,3 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # Compare agents_vuln_before_second_scan with state_index_content # To Do # check_vuln_state_index(state_index_content_after_second_scan, agents_vuln_before_second_scan) - - -# @pytest.mark.dependency(depends=["test_syscollector_initial_scans"]) -@pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) -def test_vulnerability_detector_scans(preconditions, body, teardown, setup, host_manager): - """ - """ - # Launch tests tasks - launch_parallel_operations(body['tasks'], host_manager) - - # # Check vulnerability - # agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) - - # Check alert in Wazuh Indexer - # monitoring_data = generate_monitoring_alerts_all_agent(host_manager, body['check_alerts']) - # expected_alerts = body['check_agent_alert_indexer'] - - # Check agent 
System state - # To Do - # results = monitoring_events_multihost(host_manager, monitoring_data) - # assert all(results.values()), f"Expected message was not triggered for some agents, {results}" From 1ebf9d6f67bb99cdca834a32be261f86bff7f985 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 18:54:33 +0000 Subject: [PATCH 014/174] refac: remove non-used fixtures --- .../test_vulnerability_detector/conftest.py | 42 ----- .../inventory.yaml | 171 ++++++++++++++++++ 2 files changed, 171 insertions(+), 42 deletions(-) create mode 100644 tests/end_to_end/test_vulnerability_detector/inventory.yaml diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index db0368f310..bcf356a419 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -9,45 +9,3 @@ def host_manager(request): manager = HostManager(inventory_path) return manager - -@pytest.fixture(scope='function') -def setup(preconditions, teardown, host_manager): - """ - """ - host_manager = host_manager - - if preconditions: - launch_parallel_operations(preconditions['tasks'], host_manager) - - # if 'check_alerts' in preconditions: - # monitoring_data = {} - - # for agent in host_manager.get_group_hosts('agent'): - # host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - # check_alerts_data = preconditions['check_alerts'][host_os_name] - - # for event in check_alerts_data: - # if not host_manager.get_host_variables(agent)['manager'] in monitoring_data: - # monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] - - # if not 'parameters' in event: - # event['parameters'] = {} - # event['parameters']['HOST_NAME'] = agent - - # regex = get_event_regex(event) - - # monitoring_element = { - # 'regex': regex, - # 'path': '/var/ossec/logs/alerts/alerts.json', - # 'timeout': 30, - # 
'parameters': event['parameters'] - # } - # monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - - # monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - # monitoring_events_multihost(host_manager, monitoring_data) - - yield - - if teardown: - launch_parallel_operations(teardown, host_manager) diff --git a/tests/end_to_end/test_vulnerability_detector/inventory.yaml b/tests/end_to_end/test_vulnerability_detector/inventory.yaml new file mode 100644 index 0000000000..38781e6ef5 --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/inventory.yaml @@ -0,0 +1,171 @@ +manager: + hosts: + manager1: + ip: 172.31.12.151 + version: 4.6.0 + revision: 1 + repository: live + type: master + os: ubuntu_22 + service: EC2 + resources: + cpu: 4 + memory: 8192 + ansible_host: 172.31.12.151 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + manager2: + ip: 172.31.15.124 + version: 4.6.0 + revision: 1 + repository: live + type: worker + os: ubuntu_22 + service: EC2 + resources: + cpu: 2 + memory: 4096 + ansible_host: 172.31.15.124 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + vars: {} +filebeat: + hosts: + manager1: + ip: 172.31.12.151 + os: ubuntu_22 + service: EC2 + resources: + cpu: 4 + memory: 8192 + ansible_host: 172.31.12.151 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + manager2: + ip: 172.31.15.124 + os: ubuntu_22 + service: EC2 + resources: + cpu: 2 + memory: 4096 + ansible_host: 172.31.15.124 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + vars: {} +indexer: + hosts: + manager1: + ip: 172.31.12.151 + 
version: 4.6.0 + revision: 1 + repository: live + os: ubuntu_22 + service: EC2 + resources: + cpu: 4 + memory: 8192 + ansible_host: 172.31.12.151 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + indexer_user: admin + indexer_password: changeme + vars: {} +agent: + children: + linux: + hosts: + ip-172-31-4-181.ec2.internal: + ip: 172.31.4.181 + version: 4.6.0 + revision: 1 + repository: live + manager: manager1 + os: centos_7 + service: EC2 + ansible_host: 172.31.4.181 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.11 + manager_ip: 172.31.12.151 + arch: amd64 + ip-172-31-13-183: + ip: 172.31.13.183 + version: 4.6.0 + revision: 1 + repository: live + manager: manager1 + os: ubuntu_22 + service: EC2 + ansible_host: 172.31.13.183 + ansible_user: qa + ansible_connection: ssh + os_name: linux + ansible_become: 'true' + ansible_python_interpreter: /usr/local/bin/python3.10 + manager_ip: 172.31.12.151 + arch: amd64 + + macos: + hosts: + macos_1200: + ip: 10.10.0.251 + version: 4.6.0 + revision: 1 + repository: live + manager: manager1 + os: macos + service: vagrant + ansible_host: 10.10.0.251 + ansible_port: 30048 + ansible_password: vagrant + ansible_user: vagrant + ansible_connection: ssh + os_name: macos + ansible_become: 'true' + ansible_python_interpreter: /usr/bin/python3 + manager_ip: 172.31.12.151 + arch: amd64 + solaris: + hosts: {} + windows: + hosts: + DESKTOP-AQ2R8SM: + ip: 172.31.1.101 + version: 4.6.0 + revision: 1 + repository: live + manager: manager2 + os: windows_11 + service: EC2 + ansible_host: 172.31.1.101 + ansible_user: qa + ansible_connection: winrm + os_name: windows + ansible_password: wazuhqa + ansible_winrm_server_cert_validation: ignore + ansible_become: 'false' + ansible_python_interpreter: C:\Users\qa\AppData\Local\Programs\Python\Python311\python.exe + 
manager_ip: 172.31.15.124 + arch: amd64 +all: + vars: + ansible_ssh_common_args: -o StrictHostKeyChecking=no + ansible_winrm_server_cert_validation: ignore + ansible_ssh_private_key_file: /home/rebits/.ssh/JenkinsEphemeral2.pem \ No newline at end of file From 2031f475cf3bf703b4d26c529aa57fef907f06b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 18:55:32 +0000 Subject: [PATCH 015/174] refact: remove testing inventory --- .../inventory.yaml | 171 ------------------ 1 file changed, 171 deletions(-) delete mode 100644 tests/end_to_end/test_vulnerability_detector/inventory.yaml diff --git a/tests/end_to_end/test_vulnerability_detector/inventory.yaml b/tests/end_to_end/test_vulnerability_detector/inventory.yaml deleted file mode 100644 index 38781e6ef5..0000000000 --- a/tests/end_to_end/test_vulnerability_detector/inventory.yaml +++ /dev/null @@ -1,171 +0,0 @@ -manager: - hosts: - manager1: - ip: 172.31.12.151 - version: 4.6.0 - revision: 1 - repository: live - type: master - os: ubuntu_22 - service: EC2 - resources: - cpu: 4 - memory: 8192 - ansible_host: 172.31.12.151 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - manager2: - ip: 172.31.15.124 - version: 4.6.0 - revision: 1 - repository: live - type: worker - os: ubuntu_22 - service: EC2 - resources: - cpu: 2 - memory: 4096 - ansible_host: 172.31.15.124 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - vars: {} -filebeat: - hosts: - manager1: - ip: 172.31.12.151 - os: ubuntu_22 - service: EC2 - resources: - cpu: 4 - memory: 8192 - ansible_host: 172.31.12.151 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - manager2: - ip: 172.31.15.124 - os: ubuntu_22 - service: EC2 - resources: - 
cpu: 2 - memory: 4096 - ansible_host: 172.31.15.124 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - vars: {} -indexer: - hosts: - manager1: - ip: 172.31.12.151 - version: 4.6.0 - revision: 1 - repository: live - os: ubuntu_22 - service: EC2 - resources: - cpu: 4 - memory: 8192 - ansible_host: 172.31.12.151 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - indexer_user: admin - indexer_password: changeme - vars: {} -agent: - children: - linux: - hosts: - ip-172-31-4-181.ec2.internal: - ip: 172.31.4.181 - version: 4.6.0 - revision: 1 - repository: live - manager: manager1 - os: centos_7 - service: EC2 - ansible_host: 172.31.4.181 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.11 - manager_ip: 172.31.12.151 - arch: amd64 - ip-172-31-13-183: - ip: 172.31.13.183 - version: 4.6.0 - revision: 1 - repository: live - manager: manager1 - os: ubuntu_22 - service: EC2 - ansible_host: 172.31.13.183 - ansible_user: qa - ansible_connection: ssh - os_name: linux - ansible_become: 'true' - ansible_python_interpreter: /usr/local/bin/python3.10 - manager_ip: 172.31.12.151 - arch: amd64 - - macos: - hosts: - macos_1200: - ip: 10.10.0.251 - version: 4.6.0 - revision: 1 - repository: live - manager: manager1 - os: macos - service: vagrant - ansible_host: 10.10.0.251 - ansible_port: 30048 - ansible_password: vagrant - ansible_user: vagrant - ansible_connection: ssh - os_name: macos - ansible_become: 'true' - ansible_python_interpreter: /usr/bin/python3 - manager_ip: 172.31.12.151 - arch: amd64 - solaris: - hosts: {} - windows: - hosts: - DESKTOP-AQ2R8SM: - ip: 172.31.1.101 - version: 4.6.0 - revision: 1 - repository: live - manager: manager2 - os: windows_11 - service: EC2 - ansible_host: 172.31.1.101 - 
ansible_user: qa - ansible_connection: winrm - os_name: windows - ansible_password: wazuhqa - ansible_winrm_server_cert_validation: ignore - ansible_become: 'false' - ansible_python_interpreter: C:\Users\qa\AppData\Local\Programs\Python\Python311\python.exe - manager_ip: 172.31.15.124 - arch: amd64 -all: - vars: - ansible_ssh_common_args: -o StrictHostKeyChecking=no - ansible_winrm_server_cert_validation: ignore - ansible_ssh_private_key_file: /home/rebits/.ssh/JenkinsEphemeral2.pem \ No newline at end of file From 07b9c97b0cb1fefcfed208a623f3863435f0023c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 20 Nov 2023 18:56:06 +0000 Subject: [PATCH 016/174] refact: remove non-used vuln-packages module --- .../data/vulnerable_packages.json | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 tests/end_to_end/test_vulnerability_detector/data/vulnerable_packages.json diff --git a/tests/end_to_end/test_vulnerability_detector/data/vulnerable_packages.json b/tests/end_to_end/test_vulnerability_detector/data/vulnerable_packages.json deleted file mode 100644 index 32e6a511ef..0000000000 --- a/tests/end_to_end/test_vulnerability_detector/data/vulnerable_packages.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "rclone-v1.49.5": { - "centos": "https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm", - "ubuntu": "https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb" - } -} \ No newline at end of file From 98734a22c680cf0dd3d72066d9ca1e7ebf5936a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 09:55:44 +0000 Subject: [PATCH 017/174] docs: include test docstrings --- .../test_vulnerability_detector/test_scans.py | 147 +++++++++++++----- 1 file changed, 110 insertions(+), 37 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index 1313e7bd50..d13e9eb76a 100644 --- 
a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -1,6 +1,48 @@ +# Copyright (C) 2015, Wazuh Inc. +# Created by Wazuh, Inc. . +# This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 + +""" +Vulnerability Detector Basic Cases Tests +---------------------------------------- + +Type: End-to-End (E2E) + +Brief: + This module contains basic tests to ensure the proper functionality of the Vulnerability Detector. + The tests validate that initial Syscollector scans, along with installation, removal, and uninstall operations, + trigger Vulnerability Detector scans, generating the expected vulnerabilities. + The verification of vulnerabilities is conducted through the Wazuh Indexer, Agents' state index, and Wazuh API vulnerability endpoints. + Additionally, the tests ensure the consistency of these values. + +Tests: + - test_syscollector_initial_agent_scan: + Validates the initiation of Syscollector scans across all agents in the environment. + Subsequently, it ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. + The Agent's Vulnerability Indexer index is expected to be updated with the detected vulnerabilities. 
+ +Issue: https://github.com/wazuh/wazuh-qa/issues/4369 + +Components: + - vulnerability_detector + - syscollector + +Environment: e2e_vulnerability_detector + +Tiers: 0 + +Markers: + - e2e + - vulnerability_detector + - tier0 + +ToDo: + - +""" import os import pytest -import yaml + +from typing import Generator, Dict, List from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.end_to_end.configuration import backup_configurations, restore_backup, configure_environment @@ -10,38 +52,27 @@ from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated, wait_until_vuln_scan_agents_finished from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events_multihost from wazuh_testing.end_to_end.regex import get_event_regex -from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations +from wazuh_testing.end_to_end.indexer_api import get_indexer_values +from wazuh_testing.tools.system import HostManager TIMEOUT_SYSCOLLECTOR_SCAN = 120 +local_path = os.path.dirname(os.path.abspath(__file__)) current_dir = os.path.dirname(__file__) configurations_dir = os.path.join(current_dir, "data", "configurations") -cases = {} -local_path = os.path.dirname(os.path.abspath(__file__)) configurations_paths = { - 'manager': os.path.join(configurations_dir, 'manager.yaml'), - 'agent': os.path.join(configurations_dir, 'agent.yaml') -} -with open(os.path.join(current_dir, 'cases.yaml'), 'r') as cases_file: - cases = yaml.load(cases_file, Loader=yaml.FullLoader) - - -complete_list = [ - ( - case['preconditions'] if 'preconditions' in case else None, - case['body'] if 'body' in case else None, - case['teardown'] if 'teardown' in case else None - ) - for case in cases -] - -dependencies = [None if 'depends' not in case else pytest.mark.depends(name=case['depend']) for case in cases] -list_ids = [ case['id'] for case in cases] - + 'manager': os.path.join(configurations_dir, 
'manager.yaml'), + 'agent': os.path.join(configurations_dir, 'agent.yaml') + } def load_vulnerability_detector_configurations(): + """Return the configurations for Vulnerability testing for the agent and manager roles + + Return: + Dict: Configurations for each role + """ return { 'agent': load_configuration_template(configurations_paths['agent'], [{}], [{}]), 'manager': load_configuration_template(configurations_paths['manager'], [{}], [{}]) @@ -49,7 +80,12 @@ def load_vulnerability_detector_configurations(): @pytest.fixture(scope='module') -def setup_vulnerability_tests(host_manager): +def setup_vulnerability_tests(host_manager: HostManager) -> Generator: + """Setup the vulnerability tests environment + + Args: + host_manager (HostManager): An instance of the HostManager class. + """ # Configure managers and agents hosts_configuration_backup = backup_configurations(host_manager) configure_environment(host_manager, load_vulnerability_detector_configurations()) @@ -72,16 +108,58 @@ def setup_vulnerability_tests(host_manager): restore_backup(host_manager, hosts_configuration_backup) -def check_vuln_state_index(agents_vulnerabilities_index_value, agents_vulnerabilities_api_value): - """Check that agents vulnerabilities match with index state values +def check_vuln_state_index(host_manager: HostManager, agents_vulnerabilities_api_value: Dict[str, List[Dict]]): + """Check the consistency of the vulnerabilities included in the indexer. + + This function ensures that the vulnerabilities gathered from the Wazuh API are included in the + Agent's Vulnerabilities index. + + Args: + host_manager (HostManager): Host manager of the environment. + agents_vulnerabilities_api_value (Dict[str, List[str]]): A dictionary containing the list of vulnerabilities + for each agent, where keys are agent names and values are lists of strings representing the vulnerabilities. + + Raises: + AssertionError: If inconsistencies are found between the API values and the state of the indexer. 
""" - # To Do + # Get the indexer values + state_index_content_before_second_scan = get_indexer_values(host_manager) + + # To Do: Ensure consistency of the agent_vulnerabilities_api_value and state_index_content_before_second_scan pass @pytest.mark.dependency() def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): """ + description: Validates the initiation of Syscollector scans across all agents in the environment. + + This test ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. + The Agent's Vulnerability Indexer index is expected to be updated with the detected vulnerabilities. + + tier: 0 + + parameters: + - host_manager: + type: fixture + brief: Get the host manager of the environment + - setup_vulnerability_tests: + type: fixture + brief: Setup the environment to proceed with the testing + + assertions: + - Verify that syscollector scan is started after agent start + - Verify that Vulnerability scan is performed for all the agent + - Verify that vulnerabilities are generated for each agent (Check vulnerabilities using Wazuh API) + - Verify that Agent's Vulnerabilities index is updated with the agent vulnerabilities, being conssitent with the API results + - Verify that second scan is performed in expected timeframe + - Verify that no new vulnearbilities are detected since the first scan + + cases: None + + tags: + - syscollector + - vulnerability_detector """ # Monitor for the first Syscollector scan in all the agents monitoring_data = generate_monitoring_logs_all_agent(host_manager, @@ -102,13 +180,11 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): for agent, vuln in agents_vuln_before_second_scan.items(): assert vuln, f"No vulnerabilities were detected for agent {agent}" - # Check Agent's System states are stored - # To Do - # state_index_content_before_second_scan = get_vuln_state_value(host_manager) + # Get agent's vulnerabilities + 
agents_vuln_before_second_scan = get_agents_vulnerabilities(host_manager) # Compare agents_vuln_before_second_scan with state_index_content - # To Do - # check_vuln_state_index(state_index_content_before_second_scan, agents_vuln_before_second_scan) + check_vuln_state_index(host_manager, agents_vuln_before_second_scan) # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly truncate_managers_logs(host_manager) @@ -130,9 +206,6 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): assert agents_vuln_before_second_scan == agents_vuln_after_second_scan - # Check Agent's System states are stored - # state_index_content_after_second_scan = get_vuln_state_value(host_manager) + # Compare agents_vuln_after_second_scan with state_index_content + check_vuln_state_index(host_manager, agents_vuln_after_second_scan) - # Compare agents_vuln_before_second_scan with state_index_content - # To Do - # check_vuln_state_index(state_index_content_after_second_scan, agents_vuln_before_second_scan) From ea79262e82d50b5aa09650cb87c531ebfc9d727a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 09:58:03 +0000 Subject: [PATCH 018/174] refac: logs path variable --- .../wazuh_testing/end_to_end/__init__.py | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py b/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py index df24fe5835..af7de99c04 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py @@ -11,15 +11,23 @@ fetched_alerts_json_path = os.path.join(gettempdir(), 'alerts.json') + +base_path = { + 'linux': '/var/ossec', + 'windows': r'C:\Program Files (x86)\ossec-agent', + 'macos': '/Library/Ossec' +} + configuration_filepath_os = { - 'linux': '/var/ossec/etc/ossec.conf', - 'windows': r'C:\\Program Files 
(x86)\\ossec-agent\\ossec.conf', - 'macos': '/Library/Ossec/etc/ossec.conf' + 'linux': os.path.join(base_path['linux'], 'etc', 'ossec.conf'), + 'windows': os.path.join(base_path['windows'], 'ossec.conf'), + 'macos': os.path.join(base_path['macos'], 'etc', 'ossec.conf') } + logs_filepath_os = { - 'linux': '/var/ossec/logs/ossec.log', - 'windows': r'C:\\Program Files (x86)\\ossec-agent\\ossec.log', - 'macos': '/Library/Ossec/logs/ossec.log' + 'linux': os.path.join(base_path['linux'], 'logs', 'ossec.log'), + 'windows': os.path.join(base_path['windows'], 'ossec.log'), + 'macos': os.path.join(base_path['macos'], 'logs', 'ossec.log') } From be7a936abc6824803feb9b54ff11405822b0d408 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 10:32:49 +0000 Subject: [PATCH 019/174] refac: e2e modules and restore IT monitoring --- .../wazuh_testing/end_to_end/configuration.py | 13 +++--- .../wazuh_testing/end_to_end/indexer_api.py | 9 +++- .../wazuh_testing/end_to_end/logs.py | 11 ++--- .../wazuh_testing/end_to_end/monitoring.py | 45 ++++++++----------- .../wazuh_testing/end_to_end/regex.py | 36 +++++++++++---- .../wazuh_testing/end_to_end/services.py | 4 -- .../wazuh_testing/end_to_end/waiters.py | 35 ++++++++++++--- .../vulnerability_detector/event_monitor.py | 2 +- .../wazuh_testing/tools/WazuhHandler.py | 0 .../wazuh_testing/wazuh_testing/tools/file.py | 24 +++++++--- .../wazuh_testing/tools/monitoring.py | 10 +---- .../wazuh_testing/tools/system.py | 1 - 12 files changed, 115 insertions(+), 75 deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/tools/WazuhHandler.py diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index 360f37a6eb..cc1c24dbff 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -1,3 +1,6 @@ +# Copyright (C) 2015, Wazuh Inc. 
+# Created by Wazuh, Inc. . +# This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ Module Name: configuration.py @@ -6,19 +9,15 @@ and related tools. Functions: - 1. backup_configurations(host_manager: HostManager) -> dict: + - backup_configurations(host_manager: HostManager) -> dict: Backup configurations for all hosts in the specified host manager. - 2. restore_backup(host_manager: HostManager, backup_configurations: dict) -> None: + - restore_backup(host_manager: HostManager, backup_configurations: dict) -> None: Restore configurations for all hosts in the specified host manager. - 3. configure_environment(host_manager: HostManager, configurations: dict) -> None: + - configure_environment(host_manager: HostManager, configurations: dict) -> None: Configure the environment for all hosts in the specified host manager. This function uses ThreadPool to parallelize the configuration process. - -Module Usage: - This module can be used to manage configurations for a distributed system, with functions for - backup, restore, and parallelized environment configuration. """ from multiprocessing.pool import ThreadPool import xml.dom.minidom diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index c4e2fd8036..0fa4e1a09a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -1,3 +1,7 @@ +# Copyright (C) 2015, Wazuh Inc. +# Created by Wazuh, Inc. . +# This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 + """ Module Name: indexer_api @@ -26,6 +30,7 @@ vulnerability states. 
""" import requests +from typing import Dict from wazuh_testing.tools.system import HostManager @@ -33,7 +38,7 @@ STATE_INDEX_NAME = 'wazuh-vulnerabilities-states' -def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, index: str = 'wazuh-alerts*') -> str: +def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, index: str = 'wazuh-alerts*') -> Dict: """ Get values from the Wazuh Elasticsearch indexer API. @@ -54,5 +59,5 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' } response = requests.get(url=url, params={'pretty': 'true'}, json=query, verify=False, auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password'])) - return response.text + return response.json() diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index 690ff26aa8..36e55bb3f0 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -5,32 +5,29 @@ This module provides functions for truncating logs and alerts for Wazuh agents and managers. Functions: - 1. truncate_agents_logs(host_manager: HostManager) -> None: + - truncate_agents_logs(host_manager: HostManager) -> None: Truncate logs for Wazuh agents. Args: host_manager: An instance of the HostManager class containing information about hosts. - 2. truncate_managers_logs(host_manager: HostManager) -> None: + - truncate_managers_logs(host_manager: HostManager) -> None: Truncate logs for Wazuh managers. Args: host_manager: An instance of the HostManager class containing information about hosts. - 3. truncate_logs(host_manager: HostManager) -> None: + - truncate_logs(host_manager: HostManager) -> None: Truncate logs for both Wazuh agents and managers. Args: host_manager: An instance of the HostManager class containing information about hosts. - 4. 
truncate_alerts(host_manager: HostManager) -> None: + - truncate_alerts(host_manager: HostManager) -> None: Truncate Wazuh alerts. Args: host_manager: An instance of the HostManager class containing information about hosts. - -Module Usage: - This module can be used to truncate logs and alerts for Wazuh agents and managers. """ from wazuh_testing.end_to_end import logs_filepath_os diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index bc1f5a743f..22d3c54a2a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -1,38 +1,32 @@ """ -Module Name: wazuh_monitoring +Module Name: monitoring Description: This module provides functions for monitoring events, files, and alerts in a Wazuh environment. -Imports: - - os - - tempfile - - re - - sleep - - requests - - ThreadPool - Functions: - 1. monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data: dict) -> dict: + - monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data: dict) -> dict: Monitor events on hosts using the HostMonitor. - 2. monitoring_events_multihost(host_manager: HostManager, monitoring_data: dict) -> None: + - monitoring_events_multihost(host_manager: HostManager, monitoring_data: dict) -> None: Monitor events on multiple hosts concurrently. - 3. generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: list, timeout_list: list) -> dict: + - generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: list, timeout_list: list) -> dict: Generate monitoring data for logs on all agent hosts. - 4. generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int) -> dict: + - generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int) -> dict: Generate monitoring data for logs on a specific manager host. 
- 5. generate_monitoring_alerts_all_agent(host_manager: HostManager, events_metadata: dict) -> dict: + - generate_monitoring_alerts_all_agent(host_manager: HostManager, events_metadata: dict) -> dict: Generate monitoring data for alerts on all agent hosts. """ import tempfile import re from time import sleep +from typing import Dict, List from multiprocessing.pool import ThreadPool + from wazuh_testing.end_to_end import logs_filepath_os from wazuh_testing.tools.file import create_temp_file from wazuh_testing.tools.monitoring import HostMonitor @@ -40,9 +34,8 @@ from wazuh_testing.tools.system import HostManager -def monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data: dict) -> dict: - """ - Monitor events on hosts using the HostMonitor class. +def monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data: Dict) -> Dict: + """Monitor events on hosts using the HostMonitor class. Args: host_manager: An instance of the HostManager class containing information about hosts. @@ -73,7 +66,7 @@ def monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data return results -def monitoring_events_multihost(host_manager: HostManager, monitoring_data: dict) -> None: +def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict) -> None: """ Monitor events on multiple hosts concurrently. @@ -81,17 +74,17 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: dict host_manager: An instance of the HostManager class containing information about hosts. monitoring_data: A dictionary containing monitoring data for each host. """ - def monitoring_event(host_manager, host, monitoring_elements): + def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict]): """ Monitor the specified elements on a host. - Parameters: - - host_manager: An object managing hosts. - - host: The target host. 
- - monitoring_elements: A list of dictionaries containing regex, timeout, and file. - Returns: - - The first match found in the file content. + + Args: + host_manager (HostManager): Host Manager to handle the environment + host (str): The target host. + monitoring_elements(List): A list of dictionaries containing regex, timeout, and file. + Raises: - - TimeoutError if no match is found within the specified timeout. + TimeoutError: If no match is found within the specified timeout. """ for element in monitoring_elements: regex, timeout, monitoring_file = element['regex'], element['timeout'], element['file'] diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index 3f6170d386..9158a6e033 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -1,10 +1,12 @@ +from typing import Dict -regex = { + +REGEX_PATTERNS = { 'syscollector_scan_start': { 'regex': '.*INFO: Starting evaluation.' }, 'syscollector_scan_end': { - 'regex': '.*INFO: Starting evaluation.' + 'regex': '.*INFO: Ending evaluation.' }, 'syscollector_install_package_alert_yum': { 'regex': '.*installed.*agent".*"name":"(\S+)".*Installed: (\S+).*?(\S+)', @@ -18,24 +20,40 @@ 'regex': '.*Yum package updated.*agent".*"name":"(\S+)".*Updated: (\S+).*?(\S+)', 'parameters': ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] }, - 'vulnerability_alert':{ + 'vulnerability_alert': { 'regex': '.*HOST_NAME.*package":.*name":"PACKAGE_NAME".*version":"PACKAGE_VERSION".*"architecture":"ARCHITECTURE.*"cve":"CVE"', 'parameters': ['HOST_NAME', 'CVE', 'PACKAGE_NAME', 'PACKAGE_VERSION', 'ARCHITECTURE'] } } -def get_event_regex(event): +def get_event_regex(event: Dict): """ + Get the regex pattern for a specific event. + + Args: + event (dict): Dictionary containing the event information. + + Returns: + str: The regex pattern for the specified event. 
+ + Raises: + Exception: If required parameters are missing. """ - expected_event = regex[event['event']] + expected_event = REGEX_PATTERNS.get(event['event']) + + if expected_event is None: + raise Exception(f"Invalid event: {event['event']}") + expected_regex = expected_event['regex'] - if 'parameters' in expected_event and not 'parameters' in event: - raise Exception(f"Not provided enaugh data to create regex. Missing {event['PARAMETERS']}") + if 'parameters' in expected_event and 'parameters' not in event: + raise Exception(f"Not provided enough data to create regex. Missing {expected_event['parameters']}") + elif 'parameters' in event: for parameter in expected_event['parameters']: - expected_regex = expected_regex.replace(parameter, event['parameters'][parameter]) - + expected_regex = expected_regex.replace(parameter, event['parameters'].get(parameter, '')) return expected_regex + + diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/services.py b/deps/wazuh_testing/wazuh_testing/end_to_end/services.py index aa0e4182e4..e69de29bb2 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/services.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/services.py @@ -1,4 +0,0 @@ -def control_environment(host_manager, operation, group_list): - for group in group_list: - for host in host_manager.get_group_hosts(group): - host_manager.handle_wazuh_services(host, operation) \ No newline at end of file diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index b2b47beea8..2d70bdd51e 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -1,18 +1,43 @@ -from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_manager, monitoring_events_multihost +from wazuh_testing.end_to_end.monitoring import ( + generate_monitoring_logs_manager, + monitoring_events_multihost +) from wazuh_testing.end_to_end.wazuh_api 
import get_agents_id +from wazuh_testing.tools.system import HostManager -def wait_until_vd_is_updated(host_manager): +def wait_until_vd_is_updated(host_manager: HostManager) -> None: + """ + Wait until the vulnerability data is updated for all manager hosts. + + Args: + host_manager (HostManager): Host manager instance to handle the environment. + """ monitoring_data = {} + for manager in host_manager.get_group_hosts('manager'): - monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Starting vulnerability scan', 800) + monitoring_data = generate_monitoring_logs_manager( + host_manager, manager, 'Starting vulnerability scan', 800 + ) monitoring_events_multihost(host_manager, monitoring_data) -def wait_until_vuln_scan_agents_finished(host_manager): +def wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: + """ + Wait until vulnerability scans for all agents are finished. + + Args: + host_manager (HostManager): Host manager instance to handle the environment. 
+ """ for agent in host_manager.get_group_hosts('agent'): manager_host = host_manager.get_host_variables(agent)['manager'] agents_id = get_agents_id(host_manager) - monitoring_data = generate_monitoring_logs_manager(host_manager, manager_host, rf"Finished vulnerability assessment for agent '{agents_id[agent]}'", 700) + agent_id = agents_id.get(agent, '') + finished_scan_pattern = rf"Finished vulnerability assessment for agent '{agent_id}'" + + monitoring_data = generate_monitoring_logs_manager( + host_manager, manager_host, finished_scan_pattern, 700 + ) + monitoring_events_multihost(host_manager, monitoring_data) diff --git a/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/event_monitor.py b/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/event_monitor.py index 0a5c843593..bb3c76ea04 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/event_monitor.py +++ b/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/event_monitor.py @@ -167,7 +167,7 @@ def check_vulnerability_full_scan_end(log_monitor=None, agent_id='000'): agent_id (str): Agent ID. 
""" check_vuln_detector_event(file_monitor=log_monitor, timeout=vd.T_40, - callback=rf"Finished vulnerability assessment for agent \\'{agent_id}\\'", + callback=f"Finished vulnerability assessment for agent '{agent_id}'", error_message='No full scan end has been detected in the log.') diff --git a/deps/wazuh_testing/wazuh_testing/tools/WazuhHandler.py b/deps/wazuh_testing/wazuh_testing/tools/WazuhHandler.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/deps/wazuh_testing/wazuh_testing/tools/file.py b/deps/wazuh_testing/wazuh_testing/tools/file.py index 8f41b4059f..269c6cac48 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/file.py +++ b/deps/wazuh_testing/wazuh_testing/tools/file.py @@ -926,8 +926,22 @@ def modify_file(path, name, new_content=None, is_binary=False): modify_file_win_attributes(path, name) -def create_temp_file(content): - fd, temp_file_path = tempfile.mkstemp(text=True) # 'text=True' specifies text mode - with open(temp_file_path, 'w', newline='\n') as temp_file: - temp_file.write(content) - return temp_file_path +def create_temp_file(content: str) -> str: + """ + Create a temporary file with the specified content. + + Args: + content (str): The content to be written to the temporary file. + + Returns: + str: The path to the created temporary file. 
+ """ + try: + fd, temp_file_path = tempfile.mkstemp(text=True) + with os.fdopen(fd, 'w', newline='\n') as temp_file: + temp_file.write(content) + return temp_file_path + except Exception as e: + print(f"Error creating temporary file: {e}") + raise + diff --git a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py index 30342aa3e1..87e0472c3f 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py @@ -432,7 +432,6 @@ def get_results(self, callback=_callback_default, accum_results=1, timeout=-1, u else: msg = self._queue.peek(position=position, block=True, timeout=self._time_step) position += 1 - print(f"Monitoring line {msg}") item = callback(msg) logging.debug(msg) if item is not None and item: @@ -917,12 +916,7 @@ def run(self, update_position=False): if len(monitored_files) == 0: raise AttributeError('There is no path to monitor. Exiting...') for path in monitored_files: - if '\\' in path: - first_path_element = path.split("\\")[-1] - else: - first_path_element = path.split("/")[-1] - - output_path = f'{host}_{first_path_element}.tmp' + output_path = f'{host}_{path.split("/")[-1]}.tmp' self._file_content_collectors.append(self.file_composer(host=host, path=path, output_path=output_path)) logger.debug(f'Add new file composer process for {host} and path: {path}') self._file_monitors.append(self._start(host=host, @@ -937,7 +931,7 @@ def run(self, update_position=False): for file_collector in self._file_content_collectors: file_collector.terminate() file_collector.join() - # self.clean_tmp_files() + self.clean_tmp_files() break time.sleep(self._time_step) self.check_result() diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 4fd41e0e74..a6023aded5 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -6,7 +6,6 
@@ import tempfile import xml.dom.minidom as minidom from typing import Union -import base64 import testinfra import yaml From 63b87c44f03ab11423d7e1ce204f65358b982929 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 10:51:05 +0000 Subject: [PATCH 020/174] docs: improve indexer api documentation --- .../wazuh_testing/end_to_end/indexer_api.py | 40 ++++++++----------- .../test_vulnerability_detector/conftest.py | 39 +++++++++++++++++- 2 files changed, 54 insertions(+), 25 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 0fa4e1a09a..18d0f96a39 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -1,33 +1,27 @@ -# Copyright (C) 2015, Wazuh Inc. -# Created by Wazuh, Inc. . -# This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 - """ -Module Name: indexer_api - -Description: - This module provides functions for interacting with the Wazuh Elasticsearch indexer API, - specifically for retrieving vulnerability state information. +Wazuh Elasticsearch Indexer Module. +----------------------------------- -Constants: - - STATE_INDEX_NAME: The name of the index used for storing Wazuh vulnerabilities states. +This module provides functions to interact with the Wazuh Elasticsearch indexer API. Functions: - 1. get_indexer_values(host_manager, credentials={'user': 'admin', 'password': 'changeme'}, index='wazuh-alerts*') -> str: - Get values from the Wazuh Elasticsearch indexer API. + - get_indexer_values: Retrieves values from the Elasticsearch indexer API. + +Usage Example: + import requests + from typing import Dict + from wazuh_testing.tools.system import HostManager - Args: - host_manager: An instance of the HostManager class containing information about hosts. 
- credentials (Optional): A dictionary containing the Elasticsearch credentials. Defaults to - {'user': 'admin', 'password': 'changeme'}. - index (Optional): The Elasticsearch index name. Defaults to 'wazuh-alerts*'. + # Usage of get_indexer_values + host_manager = HostManager() + credentials = {'user': 'admin', 'password': 'changeme'} + index = 'wazuh-alerts*' + response_data = get_indexer_values(host_manager, credentials, index) - Returns: - str: The response text from the indexer API. -Module Usage: - This module can be used to retrieve information from the Wazuh Elasticsearch indexer API, specifically for - vulnerability states. +Copyright (C) 2015, Wazuh Inc. +Created by Wazuh, Inc. . +This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ import requests from typing import Dict diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index bcf356a419..fbee965f15 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -1,10 +1,45 @@ +""" +Pytest Configuration for Wazuh Vulnerability Detector End-to-End Testing + +This module contains Pytest fixtures and configuration settings for Wazuh end-to-end testing. +It provides reusable fixtures and setup that can be shared across multiple test modules. + +Fixtures: + - `host_manager`: Fixture for creating a HostManager instance representing the Wazuh test environment. + +Configuration Options: + - `--inventory-path`: Path to the inventory file specifying the test environment hosts. 
+ +Example: + To use the `host_manager` fixture in your test module: + + ```python + def test_example(host_manager): + # Your test logic using the host_manager fixture + pass + ``` +""" import pytest from wazuh_testing.tools.system import HostManager -from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations -@pytest.fixture(scope='module') + +@pytest.fixture(scope='session') def host_manager(request): + """Fixture for creating a HostManager instance. + + This fixture initializes and provides a HostManager instance for testing purposes. The HostManager + is set up at the session scope, ensuring it is created only once for the entire test suite. + + Args: + request (FixtureRequest): The Pytest fixture request object. + + Returns: + HostManager: An instance of the HostManager class representing the Wazuh test environment. + + Raises: + ValueError: If the specified inventory path is invalid or not provided. + """ inventory_path = request.config.getoption('--inventory-path') manager = HostManager(inventory_path) From 69642ab6f3792a00c78422131973a68226cc8f3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 10:57:40 +0000 Subject: [PATCH 021/174] feat: include control environment in the host manager class --- deps/wazuh_testing/wazuh_testing/tools/system.py | 7 +++++++ .../test_vulnerability_detector/test_scans.py | 10 +++++----- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index a6023aded5..4a1f5d0de4 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -504,6 +504,11 @@ def handle_wazuh_services(self, host, operation): binary_path = f"/Library/Ossec/bin/wazuh-control" self.get_host(host).ansible('shell', f"{binary_path} {operation}", check=False) + def control_environment(self, operation, group_list): + 
for group in group_list: + for host in self.get_group_hosts(group): + self.handle_wazuh_services(host, operation) + def clean_environment(host_manager, target_files): """Clears a series of files on target hosts managed by a host manager @@ -513,3 +518,5 @@ def clean_environment(host_manager, target_files): """ for target in target_files: host_manager.clear_file(host=target[0], file_path=target[1]) + + diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index d13e9eb76a..5a31449081 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -37,7 +37,8 @@ - tier0 ToDo: - - + - check_vuln_state_index: Function to ensure the consitency of the agent's vulnerabilities + - Check if wait_until_vuln_scan_agents_finished function works as expected for the second scan """ import os import pytest @@ -46,7 +47,6 @@ from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.end_to_end.configuration import backup_configurations, restore_backup, configure_environment -from wazuh_testing.end_to_end.services import control_environment from wazuh_testing.end_to_end.logs import truncate_agents_logs, truncate_managers_logs, truncate_logs from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated, wait_until_vuln_scan_agents_finished @@ -91,8 +91,8 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: configure_environment(host_manager, load_vulnerability_detector_configurations()) # Restart managers and stop agents - control_environment(host_manager, 'stop', ['agent']) - control_environment(host_manager, 'restart', ['manager']) + host_manager.control_environment('stop', ['agent']) + host_manager.control_environment('restart', ['manager']) # Wait until VD is updated 
wait_until_vd_is_updated(host_manager) @@ -101,7 +101,7 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: truncate_logs(host_manager) # Start agents - control_environment(host_manager, 'start', ['agent']) + host_manager.control_environment('start', ['agent']) yield From a3cdad77d91492864210701ae697bfb638aab9e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 10:59:55 +0000 Subject: [PATCH 022/174] refac: remove unnused Debian ansible indexer task --- .../roles/wazuh/wazuh-indexer/tasks/Debian.yml | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/provisioning/roles/wazuh/wazuh-indexer/tasks/Debian.yml b/provisioning/roles/wazuh/wazuh-indexer/tasks/Debian.yml index 725b8d55f9..4eccabee54 100644 --- a/provisioning/roles/wazuh/wazuh-indexer/tasks/Debian.yml +++ b/provisioning/roles/wazuh/wazuh-indexer/tasks/Debian.yml @@ -1,14 +1,4 @@ ---- - -- name: Install - apt: - update_cache: yes - register: update_result - until: update_result is success - retries: 3 - delay: 10 - - name: Update cache apt: update_cache: yes From 3dafc39deebdc746f30f8f828b8a6ba600ed4995 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 11:10:25 +0000 Subject: [PATCH 023/174] docs: include docstring for some HostManager methods --- .../wazuh_testing/tools/system.py | 131 +++++++++++++----- 1 file changed, 100 insertions(+), 31 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 4a1f5d0de4..bebe5f015f 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -434,77 +434,147 @@ def configure_local_internal_options(self, local_internal_options: dict): def download_file(self, host, url, dest_path, mode='755'): """ - - name: Download foo.conf - ansible.builtin.get_url: - url: http://example.com/path/file.conf - dest: 
/etc/foo.conf - mode: '0440' + Downloads a file from the specified URL to the destination path on the specified host. + + Args: + host (str): The target host on which to download the file. + url (str): The URL of the file to be downloaded. + dest_path (str): The destination path where the file will be saved on the host. + mode (str, optional): The file permissions mode. Defaults to '755'. + + Returns: + dict: Ansible result of the download operation. + + Example: + host_manager.download_file('my_host', 'http://example.com/path/file.conf', '/etc/foo.conf', mode='0440') """ - a = self.get_host(host).ansible("get_url", f"url={url} dest={dest_path} mode={mode}", check=False) - return a + result = self.get_host(host).ansible("get_url", f"url={url} dest={dest_path} mode={mode}", check=False) + + return result def install_package(self, host, url, system='ubuntu'): + """ + Installs a package on the specified host. + + Args: + host (str): The target host on which to install the package. + url (str): The URL or name of the package to be installed. + system (str, optional): The operating system type. Defaults to 'ubuntu'. + Supported values: 'windows', 'ubuntu', 'centos'. 
+ + Returns: + Dict: Testinfra Ansible Response of the operation + + Example: + host_manager.install_package('my_host', 'http://example.com/package.deb', system='ubuntu') + """ result = False + if system =='windows': - a = self.get_host(host).ansible("win_package", f"path={url} arguments=/S", check=False) - print(a) + result = self.get_host(host).ansible("win_package", f"path={url} arguments=/S", check=False) elif system == 'ubuntu': - a = self.get_host(host).ansible("apt", f"deb={url}", check=False) - if a['changed'] == True and a['stderr'] == '': + result = self.get_host(host).ansible("apt", f"deb={url}", check=False) + if result['changed'] == True and result['stderr'] == '': result = True - print(a) elif system == 'centos': - a = self.get_host(host).ansible("yum", f"name={url} state=present sslverify=false disable_gpg_check=True", check=False) - if 'rc' in a and a['rc'] == 0 and a['changed'] == True: + result = self.get_host(host).ansible("yum", f"name={url} state=present sslverify=false disable_gpg_check=True", check=False) + if 'rc' in result and result['rc'] == 0 and result['changed'] == True: result = True - print(a) + + return result def get_master_ip(self): """ + Retrieves the IP address of the master node from the inventory. + + Returns: + str: The IP address of the master node, or None if not found. + Example: + master_ip = host_manager.get_master_ip() """ master_ip = None + for manager in self.get_group_hosts('manager'): if 'type' in self.get_host_variables(manager) and \ self.get_host_variables(manager)['type'] == 'master': master_ip = self.get_host_variables(manager)['ip'] + return master_ip def remove_package(self, host, package_name, system): + """ + Removes a package from the specified host. + + Args: + host (str): The target host from which to remove the package. + package_name (str): The name of the package to be removed. + system (str): The operating system type. + Supported values: 'windows', 'ubuntu', 'centos'. 
+ + Returns: + Dict: Testinfra Ansible Response of the operation + + Example: + host_manager.remove_package('my_host', 'my_package', system='ubuntu') + """ result = False + if system == 'windows': - a = self.get_host(host).ansible("win_package", f"path={package_name} state=absent arguments=/S", check=False) + result = self.get_host(host).ansible("win_package", f"path={package_name} state=absent arguments=/S", check=False) elif system == 'ubuntu': - a = self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) - if a['changed'] == True and a['stderr'] == '': + result = self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) + if result['changed'] == True and result['stderr'] == '': result = True elif system == 'centos': - a = self.get_host(host).ansible("yum", f"name={package_name} state=absent", check=False) - if 'rc' in a and a['rc'] == 0 and a['changed'] == True: + result = self.get_host(host).ansible("yum", f"name={package_name} state=absent", check=False) + if 'rc' in result and result['rc'] == 0 and result['changed'] == True: result = True + return result def handle_wazuh_services(self, host, operation): + """ + Handles Wazuh services on the specified host. + + Args: + host (str): The target host on which to handle Wazuh services. + operation (str): The operation to perform ('start', 'stop', 'restart'). 
+ + Example: + host_manager.handle_wazuh_services('my_host', 'restart') + """ os = self.get_host_variables(host)['os_name'] binary_path = None + result = None + if os == 'windows': if operation == 'restart': - a = self.get_host(host).ansible('ansible.windows.win_shell', f'NET stop Wazuh', check=False) - b = self.get_host(host).ansible('ansible.windows.win_shell', f'NET start Wazuh', check=False) - - print(a) - print(b) + self.get_host(host).ansible('ansible.windows.win_shell', f'NET stop Wazuh', check=False) + self.get_host(host).ansible('ansible.windows.win_shell', f'NET start Wazuh', check=False) else: - a = self.get_host(host).ansible('ansible.windows.win_shell', f'NET {operation} Wazuh', check=False) - print(a) + result = self.get_host(host).ansible('ansible.windows.win_shell', f'NET {operation} Wazuh', check=False) else: if os == 'linux': - binary_path = f"/var/ossec/bin/wazuh-control" + result = binary_path = f"/var/ossec/bin/wazuh-control" elif os == 'macos': - binary_path = f"/Library/Ossec/bin/wazuh-control" - self.get_host(host).ansible('shell', f"{binary_path} {operation}", check=False) + result= binary_path = f"/Library/Ossec/bin/wazuh-control" + + result = self.get_host(host).ansible('shell', f"{binary_path} {operation}", check=False) + + return result def control_environment(self, operation, group_list): + """ + Controls the Wazuh services on hosts in the specified groups. + + Args: + operation (str): The operation to perform on Wazuh services ('start', 'stop', 'restart'). + group_list (list): A list of group names whose hosts' Wazuh services should be controlled. 
+ + Example: + control_environment('restart', ['group1', 'group2']) + """ for group in group_list: for host in self.get_group_hosts(group): self.handle_wazuh_services(host, operation) @@ -519,4 +589,3 @@ def clean_environment(host_manager, target_files): for target in target_files: host_manager.clear_file(host=target[0], file_path=target[1]) - From 086706d698c675bd40903af0ea29a4aa4386ea08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 11:15:39 +0000 Subject: [PATCH 024/174] docs: improve docstring of some host manager methods --- .../wazuh_testing/tools/WazuhHandler.py | 0 .../wazuh_testing/tools/system.py | 62 +++++++++++-------- 2 files changed, 37 insertions(+), 25 deletions(-) delete mode 100644 deps/wazuh_testing/wazuh_testing/tools/WazuhHandler.py diff --git a/deps/wazuh_testing/wazuh_testing/tools/WazuhHandler.py b/deps/wazuh_testing/wazuh_testing/tools/WazuhHandler.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index bebe5f015f..bfb3f7fe7f 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -53,21 +53,27 @@ def get_inventory(self) -> dict: return self.inventory def get_inventory_path(self) -> str: - """Get the loaded Ansible inventory. + """Get the path of the loaded Ansible inventory. Returns: - self.inventory: Ansible inventory + str: Path to the Ansible inventory file. + + Example: + inventory_path = get_inventory_path() """ return self.inventory_path def get_group_hosts(self, pattern='None'): - """Get all hosts from inventory that belong to a group. + """Get all hosts from the inventory that belong to a specified group pattern. Args: - group (str): Group name + pattern (str, optional): Group name or pattern. Defaults to 'None'. 
Returns: - list: List of hosts + list: List of host names belonging to the specified group pattern. + + Example: + hosts = get_group_hosts('my_group') """ if pattern: return [str(host) for host in self.inventory_manager.get_hosts(pattern=pattern)] @@ -76,20 +82,34 @@ def get_group_hosts(self, pattern='None'): def get_host_groups(self, host): - """ + """Get the list of groups to which the specified host belongs. + + Args: + host (str): Hostname. + + Returns: + list: List of group names to which the host belongs. + + Example: + groups = get_host_groups('my_host') """ group_list = self.inventory_manager.get_host(host).get_groups() + return [str(group) for group in group_list] def get_host_variables(self, host): """Get the variables of the specified host. Args: - host (str): Hostname + host (str): Hostname. Returns: - testinfra.modules.base.Ansible: Host instance from hostspec + testinfra.modules.base.Ansible: Host instance from hostspec. + + Example: + variables = get_host_variables('my_host') """ + inventory_manager_host = self.inventory_manager.get_host(host) return self.hosts_variables[inventory_manager_host] @@ -108,11 +128,12 @@ def get_host(self, host: str): def truncate_file(self, host: str, filepath: str): ansible_command = 'file' if 'os_name' in self.get_host_variables(host): - host_os_name = self.get_host_variables(host)['os_name'] ansible_command = 'win_copy' if self.get_host_variables(host)['os_name'] == 'windows' else 'copy' result = self.get_host(host).ansible(ansible_command, f"dest='{filepath}' content=''", check=False) + return result + def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/ossec.conf', check: bool = False): """Move from src_path to the desired location dest_path for the specified host. @@ -124,17 +145,20 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied. 
""" system = 'linux' + result = None + if 'os_name' in self.get_host_variables(host): host_os_name = self.get_host_variables(host)['os_name'] if host_os_name == 'windows': system = 'windows' if system == 'linux': - a = self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0644", + result = self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0644", check=check) - print(a) else: - self.get_host(host).ansible("ansible.windows.win_copy", f"src='{src_path}' dest='{dest_path}'", check=check) + result = self.get_host(host).ansible("ansible.windows.win_copy", f"src='{src_path}' dest='{dest_path}'", check=check) + + return result def add_block_to_file(self, host: str, path: str, replace: str, before: str, after, check: bool = False): """Add text block to desired file. @@ -144,7 +168,7 @@ def add_block_to_file(self, host: str, path: str, replace: str, before: str, aft path (str): Path of the file replace (str): Text to be inserted in the file before (str): Lower stop of the block to be replaced - after (str): Upper stop of172.31.6.71 the block to be replaced + after (str): Upper stop of the block to be replaced check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied. Default `False`. 
""" @@ -164,7 +188,6 @@ def modify_file_content(self, host: str, path: str = None, content: Union[str, b tmp_file.write(content if isinstance(content, bytes) else content.encode()) tmp_file.seek(0) self.move_file(host, src_path=tmp_file.name, dest_path=path) - tmp_file.close() def control_service(self, host: str, service: str = 'wazuh', state: str = "started", check: bool = False): @@ -210,7 +233,6 @@ def get_file_content(self, host: str, file_path: str): host (str): Hostname file_path (str) : Path of the file """ - # return self.get_host(host).file(file_path).content_string ansible_method = 'command' command = 'cat' if 'os_name' in self.get_host_variables(host) and self.get_host_variables(host)['os_name'] == 'windows': @@ -222,16 +244,6 @@ def get_file_content(self, host: str, file_path: str): return result['stdout'] - # testinfra_host = self.get_host(host) - # result = testinfra_host.ansible("slurp", f"src='{file_path}'", check=False) - # print(result) - # if 'content' not in result: - # raise Exception(f"No content value in {result}") - - # decoded = base64.b64decode(result['content']).decode('utf-8') - # return decoded - - def apply_config(self, config_yml_path: str, dest_path: str = WAZUH_CONF, clear_files: list = None, restart_services: list = None): """Apply the configuration described in the config_yml_path to the environment. 
From a1a76b529a239c7a2ba5433c32e6771a9c9bb4b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 11:40:05 +0000 Subject: [PATCH 025/174] docs: standarize module docstrings --- .../wazuh_testing/end_to_end/configuration.py | 27 ++-- .../wazuh_testing/end_to_end/indexer_api.py | 11 -- .../wazuh_testing/end_to_end/logs.py | 31 ++-- .../wazuh_testing/end_to_end/monitoring.py | 23 ++- .../wazuh_testing/end_to_end/regex.py | 19 +++ .../end_to_end/remote_operations_handler.py | 136 +++++++++++------- .../wazuh_testing/end_to_end/services.py | 0 .../wazuh_testing/end_to_end/waiters.py | 21 +++ .../wazuh_testing/end_to_end/wazuh_api.py | 23 +-- 9 files changed, 167 insertions(+), 124 deletions(-) delete mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/services.py diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index cc1c24dbff..3c6b8b31bd 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -1,23 +1,18 @@ -# Copyright (C) 2015, Wazuh Inc. -# Created by Wazuh, Inc. . -# This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ -Module Name: configuration.py +Configurations handler for remote hosts. +---------------------------------------- -Description: - This module provides functions for configuring and managing host configurations using the HostManager class - and related tools. +This module provides functions for configuring and managing host configurations using the HostManager class and related tools. Functions: - - backup_configurations(host_manager: HostManager) -> dict: - Backup configurations for all hosts in the specified host manager. 
- - - restore_backup(host_manager: HostManager, backup_configurations: dict) -> None: - Restore configurations for all hosts in the specified host manager. - - - configure_environment(host_manager: HostManager, configurations: dict) -> None: - Configure the environment for all hosts in the specified host manager. - This function uses ThreadPool to parallelize the configuration process. + - backup_configurations: Backup configurations for all hosts in the specified host manager. + - restore_backup: Restore configurations for all hosts in the specified host manager. + - configure_environment: Configure the environment for all hosts in the specified host manager. + + +Copyright (C) 2015, Wazuh Inc. +Created by Wazuh, Inc. . +This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ from multiprocessing.pool import ThreadPool import xml.dom.minidom diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 18d0f96a39..ddf6f4741d 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -7,17 +7,6 @@ Functions: - get_indexer_values: Retrieves values from the Elasticsearch indexer API. -Usage Example: - import requests - from typing import Dict - from wazuh_testing.tools.system import HostManager - - # Usage of get_indexer_values - host_manager = HostManager() - credentials = {'user': 'admin', 'password': 'changeme'} - index = 'wazuh-alerts*' - response_data = get_indexer_values(host_manager, credentials, index) - Copyright (C) 2015, Wazuh Inc. Created by Wazuh, Inc. . 
diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index 36e55bb3f0..05d34b302c 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -1,33 +1,20 @@ """ -Module Name: logs +Logs management module for remote hosts. +--------------------------------------- Description: This module provides functions for truncating logs and alerts for Wazuh agents and managers. Functions: - - truncate_agents_logs(host_manager: HostManager) -> None: - Truncate logs for Wazuh agents. + - truncate_agents_logs: Truncate logs for Wazuh agents. + - truncate_managers_logs: Truncate logs for Wazuh managers. + - truncate_logs: Truncate logs for both Wazuh agents and managers. + - truncate_alerts: Truncate Wazuh alerts. - Args: - host_manager: An instance of the HostManager class containing information about hosts. - - truncate_managers_logs(host_manager: HostManager) -> None: - Truncate logs for Wazuh managers. - - Args: - host_manager: An instance of the HostManager class containing information about hosts. - - - truncate_logs(host_manager: HostManager) -> None: - Truncate logs for both Wazuh agents and managers. - - Args: - host_manager: An instance of the HostManager class containing information about hosts. - - - truncate_alerts(host_manager: HostManager) -> None: - Truncate Wazuh alerts. - - Args: - host_manager: An instance of the HostManager class containing information about hosts. +Copyright (C) 2015, Wazuh Inc. +Created by Wazuh, Inc. . 
+This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ from wazuh_testing.end_to_end import logs_filepath_os diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 22d3c54a2a..4908196625 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -1,24 +1,21 @@ """ -Module Name: monitoring +Monitoring remote host files module. +------------------------------------ Description: This module provides functions for monitoring events, files, and alerts in a Wazuh environment. Functions: - - monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data: dict) -> dict: - Monitor events on hosts using the HostMonitor. + - monitoring_events_host_monitoring: Monitor events on hosts using the HostMonitor. + - monitoring_events_multihost: Monitor events on multiple hosts concurrently. + - generate_monitoring_logs_all_agent: Generate monitoring data for logs on all agent hosts. + - generate_monitoring_logs_manager: Generate monitoring data for logs on a specific manager host. + - generate_monitoring_alerts_all_agent: Generate monitoring data for alerts on all agent hosts. - - monitoring_events_multihost(host_manager: HostManager, monitoring_data: dict) -> None: - Monitor events on multiple hosts concurrently. - - generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: list, timeout_list: list) -> dict: - Generate monitoring data for logs on all agent hosts. - - - generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int) -> dict: - Generate monitoring data for logs on a specific manager host. - - - generate_monitoring_alerts_all_agent(host_manager: HostManager, events_metadata: dict) -> dict: - Generate monitoring data for alerts on all agent hosts. +Copyright (C) 2015, Wazuh Inc. 
+Created by Wazuh, Inc. . +This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ import tempfile diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index 9158a6e033..83f705e682 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -1,3 +1,22 @@ +""" +Regex Patterns for Syscollector Events. +--------------------------------------- + +This module defines regular expression patterns for various events related to Syscollector. The patterns are used to extract information from log messages. + +Constants: + REGEX_PATTERNS (dict): A dictionary mapping event names to their respective regex patterns and parameters. + +Functions: + get_event_regex: Get the regex pattern for a specific event. + + +Copyright (C) 2015, Wazuh Inc. +Created by Wazuh, Inc. . +This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 +""" + + from typing import Dict diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 859900a595..ab9f63904c 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -1,11 +1,42 @@ -from wazuh_testing.end_to_end.regex import get_event_regex -from wazuh_testing.end_to_end.monitoring import monitoring_events_multihost -from wazuh_testing.end_to_end.indexer_api import get_indexer_values, STATE_INDEX_NAME +""" +Remote Operations Module. +------------------------- +This module provides functions for launching remote operations on hosts and managing vulnerability checks. It utilizes the Wazuh testing framework, including the HostManager class for handling remote hosts and various tools for indexer API interactions. 
+ +Functions: + - launch_remote_operation: Launch a remote operation on a specified host. + - check_vuln_state_index: Check the vulnerability state index for a host. + - check_vuln_alert_indexer: Check vulnerability alerts in the indexer for a host. + - check_vuln_alert_api: Check vulnerability alerts via API for a host. + - launch_remote_sequential_operation_on_agent: Launch sequential remote operations on a specific agent. + - launch_parallel_operations: Launch parallel remote operations on multiple hosts. + +Copyright (C) 2015, Wazuh Inc. +Created by Wazuh, Inc. . +This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 +""" + + +from typing import Dict, List from multiprocessing.pool import ThreadPool +from wazuh_testing.end_to_end.indexer_api import get_indexer_values +from wazuh_testing.tools.system import HostManager + -def launch_remote_operation(host, operation_data, host_manager): +def launch_remote_operation(host: str, operation_data: Dict[str,Dict], host_manager: HostManager): + """ + Launch a remote operation on the specified host. + + Args: + host (str): The target host on which to perform the operation. + operation_data (dict): Dictionary containing operation details. + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + + Raises: + ValueError: If the specified operation is not recognized. 
+ """ host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(host)['arch'] system = host_manager.get_host_variables(host)['os_name'] @@ -14,7 +45,6 @@ def launch_remote_operation(host, operation_data, host_manager): if system == 'linux': system = host_manager.get_host_variables(host)['os'].split('_')[0] - if operation == 'install_package': package_data = operation_data['package'] package_url = package_data[host_os_name][host_os_arch] @@ -30,9 +60,6 @@ def launch_remote_operation(host, operation_data, host_manager): if operation_data['parameters']['alert_indexed']: check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data']) - if operation_data['parameters']['alert']: - check_vuln_alert(host_manager, operation_data['vulnerability_data']) - if operation_data['parameters']['api']: check_vuln_alert_api(host_manager, operation_data['vulnerability_data']) @@ -40,73 +67,80 @@ def launch_remote_operation(host, operation_data, host_manager): check_vuln_state_index(host_manager, operation_data['vulnerability_data']) -def check_vuln_state_index(host_manager, vulnerability_data): - # Check Index values - # Retry 3 times, 10 timestamp - index_vuln_state_content = get_indexer_values(host_manager) - # Process alerts +def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[str, Dict]): + """ + Check vulnerability state index for a host. -def check_vuln_alert_indexer(host_manager, vulnerability_data): - indexer_alerts = get_indexer_values(host_manager, index='wazuh-alerts*') - return indexer_alerts + Args: + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + vulnerability_data (dict): Dictionary containing vulnerability data. + ToDo: + Implement the functionality. 
+ """ + index_vuln_state_content = get_indexer_values(host_manager) -def check_vuln_alert_api(host_manager, vulnerability_data): - pass +def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict]): + """ + Check vulnerability alerts in the indexer for a host. -def check_vuln_alert(host_manager, vulnerability_data): - monitoring_data = {} + Args: + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + vulnerability_data (dict): Dictionary containing vulnerability data. - for agent in host_manager.get_group_hosts('agent'): - host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - host_os_arch = host_manager.get_host_variables(agent)['arch'] + Returns: + list: List of vulnerability alerts. - agent_vulnerability_data_parameters = vulnerability_data[host_os_name][host_os_arch] - agent_vulnerability_data_parameters['HOST_NAME'] = agent + ToDo: + Implement the functionality. + """ + indexer_alerts = get_indexer_values(host_manager, index='wazuh-alerts*') - for cve in agent_vulnerability_data_parameters['CVE']: - parameters = agent_vulnerability_data_parameters.copy() - parameters['CVE'] = cve - agent_vulnerability_data = { - 'event': 'vulnerability_alert', - 'parameters': parameters - } + return indexer_alerts - regex = get_event_regex(agent_vulnerability_data) - monitoring_element = { - 'regex': regex, - 'file': '/var/ossec/logs/alerts/alerts.json', - 'timeout': 30, - } +def check_vuln_alert_api(host_manager: HostManager, vulnerability_data: Dict[str, Dict]): + """ + Check vulnerability alerts via API for a host. - if host_manager.get_host_variables(agent)['manager'] not in monitoring_data: - monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] + Args: + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + vulnerability_data (dict): Dictionary containing vulnerability data. 
- monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) + ToDo: + Implement the functionality. + """ + pass - monitoring_events_multihost(host_manager, monitoring_data) +def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager): + """ + Launch sequential remote operations on an agent. -def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager): - print(task_list) + Args: + agent (str): The target agent on which to perform the operations. + task_list (list): List of dictionaries containing operation details. + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + """ if task_list: for task in task_list: - task_keys = list(task.keys()) - task_values = list(task.values()) - operation, operation_data = task_keys[0], task_values[0] - launch_remote_operation(agent, operation, operation_data, host_manager) + launch_remote_operation(agent, task, host_manager) + +def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager): + """ + Launch parallel remote operations on multiple hosts. -def launch_parallel_operations(task_list, host_manager): - print("Launch parallel operations") + Args: + task_list (list): List of dictionaries containing operation details. + host_manager (HostManager): An instance of the HostManager class containing information about hosts. 
+ """ for task in task_list: parallel_configuration = [] target = task['target'] for host in host_manager.get_group_hosts(target): - print(f"Append {host} {task_list} {host_manager}") parallel_configuration.append((host, task, host_manager)) with ThreadPool() as pool: diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/services.py b/deps/wazuh_testing/wazuh_testing/end_to_end/services.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index 2d70bdd51e..17f97b553b 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -1,3 +1,24 @@ +""" +Vulnerability Data Update and Scan Monitoring Module. +----------------------------------------------------- + +This module provides functions for waiting until vulnerability data is updated for all manager hosts and until vulnerability scans for all agents are finished. + +Functions: + - wait_until_vd_is_updated: Wait until the vulnerability data is updated for all manager hosts. + - wait_until_vuln_scan_agents_finished: Wait until vulnerability scans for all agents are finished. + +Dependencies: + - wazuh_testing.end_to_end.monitoring: Module containing functions for generating monitoring logs and handling events. + - wazuh_testing.end_to_end.wazuh_api: Module containing functions for retrieving agent IDs. + - wazuh_testing.tools.system: Module providing the HostManager class for handling the environment. + + +Copyright (C) 2015, Wazuh Inc. +Created by Wazuh, Inc. . 
+This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 +""" + from wazuh_testing.end_to_end.monitoring import ( generate_monitoring_logs_manager, monitoring_events_multihost diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py index 19635f4410..b508a34dac 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py @@ -1,19 +1,20 @@ """ -Wazuh API Operations Module +Wazuh API Methods Module. +------------------------- -This module provides functions for handling Wazuh API operations in a HostManager environment. -It includes methods for retrieving API parameters, obtaining authentication tokens, -and retrieving information about Wazuh agents and their vulnerabilities. +This module provides functions for interacting with the Wazuh API, including retrieving API parameters, obtaining an API token for authentication, and fetching information about Wazuh agents and their vulnerabilities. Functions: -- get_api_parameters(host_manager): Retrieves Wazuh API parameters. -- get_api_token(host_manager): Retrieves the API token for authentication. -- get_agents_id(host_manager): Retrieves the IDs of Wazuh agents. -- get_agents_vulnerabilities(host_manager): Retrieves vulnerability information for Wazuh agents. - -Parameters: -- host_manager (HostManager): An instance of the HostManager class representing the Wazuh environment. + - get_api_parameters: Retrieves the Wazuh API parameters. + - get_api_token: Retrieves the API token for authentication. + - get_agents_id: Retrieves the IDs of Wazuh agents. + - get_agents_vulnerabilities: Retrieves vulnerability information for Wazuh agents. + +Copyright (C) 2015, Wazuh Inc. +Created by Wazuh, Inc. . 
+This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ + from wazuh_testing.api import make_api_call, get_token_login_api # Wazuh API Methods From 0219de0960d3f6e5c3871d518a79a38619be6f08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 15:56:13 +0000 Subject: [PATCH 026/174] feat: include logger for VD E2E tests --- tests/end_to_end/pytest.ini | 5 ++++ .../test_vulnerability_detector/test_scans.py | 30 ++++++++++++++++--- 2 files changed, 31 insertions(+), 4 deletions(-) create mode 100644 tests/end_to_end/pytest.ini diff --git a/tests/end_to_end/pytest.ini b/tests/end_to_end/pytest.ini new file mode 100644 index 0000000000..e561b2bf7f --- /dev/null +++ b/tests/end_to_end/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +log_cli = 1 +log_cli_level = ERROR +log_cli_format = %(asctime)s %(message)s (%(filename)s:%(lineno)s) +log_cli_date_format=%Y-%m-%d %H:%M:%S diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py index 5a31449081..5e1831232c 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_scans.py +++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py @@ -42,6 +42,7 @@ """ import os import pytest +import logging from typing import Generator, Dict, List @@ -56,7 +57,9 @@ from wazuh_testing.tools.system import HostManager -TIMEOUT_SYSCOLLECTOR_SCAN = 120 +TIMEOUT_SYSCOLLECTOR_SCAN = 200 +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) local_path = os.path.dirname(os.path.abspath(__file__)) current_dir = os.path.dirname(__file__) @@ -86,25 +89,35 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: Args: host_manager (HostManager): An instance of the HostManager class. 
""" + logger.error("Init setup of environment") + # Configure managers and agents + logger.error("Getting backup of current configurations") hosts_configuration_backup = backup_configurations(host_manager) + logger.error("Configuring environment") configure_environment(host_manager, load_vulnerability_detector_configurations()) # Restart managers and stop agents + logger.error("Stopping agents") host_manager.control_environment('stop', ['agent']) + logger.error("Restarting managers") host_manager.control_environment('restart', ['manager']) # Wait until VD is updated + logger.error("Wait until Vulnerability Detector has update all the feeds") wait_until_vd_is_updated(host_manager) # Truncate alerts and logs of managers and agents + logger.error("Truncate managers and agents logs") truncate_logs(host_manager) # Start agents + logger.error("Starting agents") host_manager.control_environment('start', ['agent']) yield + logger.error("Restoring original configuration") restore_backup(host_manager, hosts_configuration_backup) @@ -130,6 +143,7 @@ def check_vuln_state_index(host_manager: HostManager, agents_vulnerabilities_api @pytest.mark.dependency() +@pytest.mark.filterwarnings('ignore::urllib3.exceptions.InsecureRequestWarning') def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): """ description: Validates the initiation of Syscollector scans across all agents in the environment. 
@@ -162,6 +176,7 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): - vulnerability_detector """ # Monitor for the first Syscollector scan in all the agents + logger.critical("Monitoring Syscollector First Scan") monitoring_data = generate_monitoring_logs_all_agent(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], @@ -170,25 +185,28 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): monitoring_events_multihost(host_manager, monitoring_data) # Truncate agents logs to detect second scan + logger.critical("Truncating agent's logs") truncate_agents_logs(host_manager) # Wait until all agents has been scanned + logger.critical("Waiting until agent's VD scan is over") wait_until_vuln_scan_agents_finished(host_manager) # Check vulnerabilities for agent + logger.critical("Check agent's vulnerabilities") agents_vuln_before_second_scan = get_agents_vulnerabilities(host_manager) for agent, vuln in agents_vuln_before_second_scan.items(): assert vuln, f"No vulnerabilities were detected for agent {agent}" - # Get agent's vulnerabilities - agents_vuln_before_second_scan = get_agents_vulnerabilities(host_manager) - # Compare agents_vuln_before_second_scan with state_index_content + logger.critical("Comparing state and API vulnerabilities for each agent") check_vuln_state_index(host_manager, agents_vuln_before_second_scan) # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly + logger.critical("Truncating manager logs") truncate_managers_logs(host_manager) + logger.critical("Monitoring Second Syscollector scan") # The Agent's syscollector second scan is run monitoring_data = generate_monitoring_logs_all_agent(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), @@ -200,12 +218,16 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # WARNING # Is possible that second scan 
will not produce expected Finished Scan in the agent. # In that case search for another event or include a hardcoded timeout + logger.critical("Waiting until agent's VD scan is over") wait_until_vuln_scan_agents_finished(host_manager) + logger.critical("Getting agents's vulnerabilities") agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) assert agents_vuln_before_second_scan == agents_vuln_after_second_scan # Compare agents_vuln_after_second_scan with state_index_content + logger.critical("Comparing state and API vulnerabilities for each agent") check_vuln_state_index(host_manager, agents_vuln_after_second_scan) + From b1f73041054279e7c51b2155e48759d7597322c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 15:56:38 +0000 Subject: [PATCH 027/174] fix: syscollector end regex --- deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py | 1 - deps/wazuh_testing/wazuh_testing/end_to_end/regex.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index ddf6f4741d..ffe5059a34 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -20,7 +20,6 @@ STATE_INDEX_NAME = 'wazuh-vulnerabilities-states' - def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, index: str = 'wazuh-alerts*') -> Dict: """ Get values from the Wazuh Elasticsearch indexer API. diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index 83f705e682..549beb840a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -25,7 +25,7 @@ 'regex': '.*INFO: Starting evaluation.' 
}, 'syscollector_scan_end': { - 'regex': '.*INFO: Ending evaluation.' + 'regex': '.*INFO: Evaluation finished.' }, 'syscollector_install_package_alert_yum': { 'regex': '.*installed.*agent".*"name":"(\S+)".*Installed: (\S+).*?(\S+)', From afc6f4998c729506a5c71aa111bd135a14cc5d19 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 16:02:10 +0000 Subject: [PATCH 028/174] feat: include armv agents into VD E2E tests --- .../environments/e2e_vulnerability_detector.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/provisioning/environments/e2e_vulnerability_detector.yaml b/provisioning/environments/e2e_vulnerability_detector.yaml index a4447c0618..8cf5521ef0 100644 --- a/provisioning/environments/e2e_vulnerability_detector.yaml +++ b/provisioning/environments/e2e_vulnerability_detector.yaml @@ -22,3 +22,16 @@ agent3: roles: [agent] os: ubuntu_22 manager: manager1 + +agent4: + roles: [agent] + os: centos_7 + manager: manager1 + arch: arm64v8 + +agent5: + roles: [agent] + os: ubuntu_22 + manager: manager2 + arch: amd64 + arch: arm64v8 From 87a36a8ecbf325f1bc2878e90ffcb6a031cba7ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 16:19:38 +0000 Subject: [PATCH 029/174] fix: e2e vd environment dictionary --- .../environment_test_dictionary.json | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/tests/system/provisioning/environment_test_dictionary.json b/tests/system/provisioning/environment_test_dictionary.json index fd2a6c1482..b0272cd45f 100644 --- a/tests/system/provisioning/environment_test_dictionary.json +++ b/tests/system/provisioning/environment_test_dictionary.json @@ -44,29 +44,29 @@ "system/test_cluster/test_agent_groups/test_groups_sync_time.py" ], "end_to_end_environment": [ - "end_to_end/test_basic_cases/test_audit/test_audit.py", - 
"end_to_end/test_basic_cases/test_aws_infrastructure_monitoring/test_aws_infrastructure_monitoring.py", - "end_to_end/test_basic_cases/test_brute_force/test_brute_force_rdp/test_brute_force_rdp.py", - "end_to_end/test_basic_cases/test_brute_force/test_brute_force_ssh/test_brute_force_ssh.py", - "end_to_end/test_basic_cases/test_detecting_suspicious_binaries/test_detecting_suspicious_binaries.py", - "end_to_end/test_basic_cases/test_docker_monitoring/test_docker_monitoring.py", - "end_to_end/test_basic_cases/test_emotet/test_emotet.py", - "end_to_end/test_basic_cases/test_fim/test_fim_linux/test_fim_linux.py", - "end_to_end/test_basic_cases/test_fim/test_fim_windows/test_fim_windows.py", - "end_to_end/test_basic_cases/test_ip_reputation/test_ip_reputation.py", - "end_to_end/test_basic_cases/test_osquery_integration/test_osquery_integration.py", - "end_to_end/test_basic_cases/test_shellshock_attack_detection/test_shellshock_attack_detection.py", - "end_to_end/test_basic_cases/test_slack_integration/test_slack_integration.py", - "end_to_end/test_basic_cases/test_sql_injection/test_sql_injection.py", - "end_to_end/test_basic_cases/test_suricata_integration/test_suricata_integration.py", - "end_to_end/test_basic_cases/test_unauthorized_processes_detection/test_unauthorized_processes_detection.py", - "end_to_end/test_basic_cases/test_virustotal_integration/test_virustotal_integration.py", - "end_to_end/test_basic_cases/test_vulnerability_detector/test_vulnerability_detector_linux/test_vulnerability_detector_linux.py", - "end_to_end/test_basic_cases/test_vulnerability_detector/test_vulnerability_detector_windows/test_vulnerability_detection_windows.py", - "end_to_end/test_basic_cases/test_windows_defender/test_windows_defender.py", - "end_to_end/test_basic_cases/test_yara_integration/test_yara_integration.py", + "test_basic_cases/test_audit/test_audit.py", + "test_basic_cases/test_aws_infrastructure_monitoring/test_aws_infrastructure_monitoring.py", + 
"test_basic_cases/test_brute_force/test_brute_force_rdp/test_brute_force_rdp.py", + "test_basic_cases/test_brute_force/test_brute_force_ssh/test_brute_force_ssh.py", + "test_basic_cases/test_detecting_suspicious_binaries/test_detecting_suspicious_binaries.py", + "test_basic_cases/test_docker_monitoring/test_docker_monitoring.py", + "test_basic_cases/test_emotet/test_emotet.py", + "test_basic_cases/test_fim/test_fim_linux/test_fim_linux.py", + "test_basic_cases/test_fim/test_fim_windows/test_fim_windows.py", + "test_basic_cases/test_ip_reputation/test_ip_reputation.py", + "test_basic_cases/test_osquery_integration/test_osquery_integration.py", + "test_basic_cases/test_shellshock_attack_detection/test_shellshock_attack_detection.py", + "test_basic_cases/test_slack_integration/test_slack_integration.py", + "test_basic_cases/test_sql_injection/test_sql_injection.py", + "test_basic_cases/test_suricata_integration/test_suricata_integration.py", + "test_basic_cases/test_unauthorized_processes_detection/test_unauthorized_processes_detection.py", + "test_basic_cases/test_virustotal_integration/test_virustotal_integration.py", + "test_basic_cases/test_vulnerability_detector/test_vulnerability_detector_linux/test_vulnerability_detector_linux.py", + "test_basic_cases/test_vulnerability_detector/test_vulnerability_detector_windows/test_vulnerability_detection_windows.py", + "test_basic_cases/test_windows_defender/test_windows_defender.py", + "test_basic_cases/test_yara_integration/test_yara_integration.py" ], "e2e_vulnerability_detector": [ - "end_to_end/test_vulnerability_detector/test_scans.py" + "test_vulnerability_detector/test_scans.py" ] } From 1d31d7da53f5f59e50c503a6beb1668680b801e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 21 Nov 2023 17:02:37 +0000 Subject: [PATCH 030/174] fix: replace filebeat version to 2 --- provisioning/roles/wazuh/ansible-filebeat-oss/defaults/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/provisioning/roles/wazuh/ansible-filebeat-oss/defaults/main.yml b/provisioning/roles/wazuh/ansible-filebeat-oss/defaults/main.yml index b3332cf924..ea8cb8b81c 100644 --- a/provisioning/roles/wazuh/ansible-filebeat-oss/defaults/main.yml +++ b/provisioning/roles/wazuh/ansible-filebeat-oss/defaults/main.yml @@ -9,7 +9,7 @@ filebeat_output_indexer_hosts: - "localhost:9200" filebeat_module_package_url: https://packages.wazuh.com/4.x/filebeat -filebeat_module_package_name: wazuh-filebeat-0.3.tar.gz +filebeat_module_package_name: wazuh-filebeat-0.2.tar.gz filebeat_module_package_path: /tmp/ filebeat_module_destination: /usr/share/filebeat/module filebeat_module_folder: /usr/share/filebeat/module/wazuh From e53b116cae03254b50975d5b2cb0de693c7f07e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 22 Nov 2023 15:29:56 +0000 Subject: [PATCH 031/174] feat: include enrollment name as hostname by default --- .../wazuh_testing/wazuh_testing/end_to_end/waiters.py | 2 +- .../templates/var-ossec-etc-ossec-agent.conf.j2 | 11 +++-------- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index 17f97b553b..2f1e63e3a7 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -56,7 +56,7 @@ def wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: agents_id = get_agents_id(host_manager) agent_id = agents_id.get(agent, '') finished_scan_pattern = rf"Finished vulnerability assessment for agent '{agent_id}'" - + monitoring_data = generate_monitoring_logs_manager( host_manager, manager_host, finished_scan_pattern, 700 ) diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 index 
350d4fedd8..cb3567caab 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 @@ -37,14 +37,15 @@ {{ wazuh_auto_restart }} {{ wazuh_crypto_method }} - {% if wazuh_agent_config.enrollment.enabled == 'yes' %} {{ wazuh_agent_config.enrollment.enabled }} {% if wazuh_agent_config.enrollment.manager_address | length > 0 %} {{ wazuh_agent_config.enrollment.manager_address }} {% endif %} {% if wazuh_agent_config.enrollment.agent_name | length > 0 %} - {{ ansible_hostname }} + {{ wazuh_agent_config.enrollment.agent_name }} + {% else %} + {{ ansible_hostname }} {% endif %} {% if wazuh_agent_config.enrollment.port is defined > 0 %} {{ wazuh_agent_config.enrollment.port }} @@ -77,12 +78,6 @@ {{ wazuh_agent_config.enrollment.use_source_ip }} {% endif %} - {% else %} - - {{ ansible_hostname }} - - - {% endif %} From c192df4af436b7066bef88ce1f80abd863d5903d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 22 Nov 2023 17:06:59 +0000 Subject: [PATCH 032/174] fix: angent name provision names --- .../templates/var-ossec-etc-ossec-agent.conf.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 index cb3567caab..30b87930ba 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 @@ -45,7 +45,7 @@ {% if wazuh_agent_config.enrollment.agent_name | length > 0 %} {{ wazuh_agent_config.enrollment.agent_name }} {% else %} - {{ ansible_hostname }} + {{ inventory_hostname }} {% endif %} {% if wazuh_agent_config.enrollment.port is defined > 0 %} {{ wazuh_agent_config.enrollment.port }} 
From 126c84581ca916dbec72d5fc58b105da1c144914 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 23 Nov 2023 15:06:28 +0000 Subject: [PATCH 033/174] fix: increased timeout for Finished scan waiter --- deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index 2f1e63e3a7..83019fb3c8 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -51,6 +51,12 @@ def wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: Args: host_manager (HostManager): Host manager instance to handle the environment. """ + # The order of agents may not be guaranteed. + # The Vulnerability Detector scans are ordered based on the agent ID. + # We are currently awaiting completion of all scans globally, + # with a timeout set to 5 minutes for each agent. 
+ final_timeout = 300 * len(host_manager.get_group_hosts('agent')) + for agent in host_manager.get_group_hosts('agent'): manager_host = host_manager.get_host_variables(agent)['manager'] agents_id = get_agents_id(host_manager) @@ -58,7 +64,7 @@ def wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: finished_scan_pattern = rf"Finished vulnerability assessment for agent '{agent_id}'" monitoring_data = generate_monitoring_logs_manager( - host_manager, manager_host, finished_scan_pattern, 700 + host_manager, manager_host, finished_scan_pattern, final_timeout ) monitoring_events_multihost(host_manager, monitoring_data) From 753e58a638b80e7f04575cd7750c30021c1fbc00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 23 Nov 2023 15:26:28 +0000 Subject: [PATCH 034/174] feat: rename test_scans to test_vd --- .../{test_scans.py => test_vulnerability_detector.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/end_to_end/test_vulnerability_detector/{test_scans.py => test_vulnerability_detector.py} (100%) diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py similarity index 100% rename from tests/end_to_end/test_vulnerability_detector/test_scans.py rename to tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py From 1d25dcec90e4c5ee1fdfb7ac8a6ecb7c25b5ca14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 23 Nov 2023 15:40:40 +0000 Subject: [PATCH 035/174] fix: VD E2E environment --- provisioning/environments/e2e_vulnerability_detector.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/provisioning/environments/e2e_vulnerability_detector.yaml b/provisioning/environments/e2e_vulnerability_detector.yaml index 8cf5521ef0..4097e4e793 100644 --- a/provisioning/environments/e2e_vulnerability_detector.yaml +++ 
b/provisioning/environments/e2e_vulnerability_detector.yaml @@ -27,11 +27,10 @@ agent4: roles: [agent] os: centos_7 manager: manager1 - arch: arm64v8 + architecture: arm64v8 agent5: roles: [agent] os: ubuntu_22 manager: manager2 - arch: amd64 - arch: arm64v8 + architecture: arm64v8 From 0606ce894b5d201551bd4f0fd93756d1c3af8c68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 23 Nov 2023 15:54:38 +0000 Subject: [PATCH 036/174] fix: error in environments dict --- tests/system/provisioning/environment_test_dictionary.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system/provisioning/environment_test_dictionary.json b/tests/system/provisioning/environment_test_dictionary.json index b0272cd45f..3de2bfed9d 100644 --- a/tests/system/provisioning/environment_test_dictionary.json +++ b/tests/system/provisioning/environment_test_dictionary.json @@ -67,6 +67,6 @@ "test_basic_cases/test_yara_integration/test_yara_integration.py" ], "e2e_vulnerability_detector": [ - "test_vulnerability_detector/test_scans.py" + "test_vulnerability_detector/test_vulnerability_detector.py" ] } From c6d57ae8e91db0f4322a168a2e6dec7a6acbd96e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 24 Nov 2023 15:29:21 +0000 Subject: [PATCH 037/174] fix: errors in remove_operations handlers and HostManager class --- .../end_to_end/remote_operations_handler.py | 79 +++++- .../wazuh_testing/tools/system.py | 32 +-- .../cases/test_vulnerability.yaml | 224 ++++++++++++++++++ .../{data => }/configurations/agent.yaml | 0 .../{data => }/configurations/manager.yaml | 0 .../test_vulnerability_detector/conftest.py | 19 ++ .../test_vulnerability_detector.py | 56 ++++- 7 files changed, 380 insertions(+), 30 deletions(-) create mode 100644 tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml rename tests/end_to_end/test_vulnerability_detector/{data => }/configurations/agent.yaml (100%) 
rename tests/end_to_end/test_vulnerability_detector/{data => }/configurations/manager.yaml (100%) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index ab9f63904c..491da9c58e 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -23,6 +23,10 @@ from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.system import HostManager +from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities +from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events_multihost +from wazuh_testing.end_to_end.waiters import wait_until_vuln_scan_agents_finished +from wazuh_testing.end_to_end.regex import get_event_regex def launch_remote_operation(host: str, operation_data: Dict[str,Dict], host_manager: HostManager): @@ -38,29 +42,50 @@ def launch_remote_operation(host: str, operation_data: Dict[str,Dict], host_mana ValueError: If the specified operation is not recognized. 
""" host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] - host_os_arch = host_manager.get_host_variables(host)['arch'] + host_os_arch = host_manager.get_host_variables(host)['architecture'] system = host_manager.get_host_variables(host)['os_name'] operation = operation_data['operation'] + + print("Performing remote operations") + + if system == 'linux': system = host_manager.get_host_variables(host)['os'].split('_')[0] if operation == 'install_package': + print("Installing package") package_data = operation_data['package'] package_url = package_data[host_os_name][host_os_arch] host_manager.install_package(host, package_url, system) + TIMEOUT_SYSCOLLECTOR_SCAN = 60 + + # Wait until syscollector + monitoring_data = generate_monitoring_logs_all_agent(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) + + monitoring_events_multihost(host_manager, monitoring_data) + + # Wait until VD scan + wait_until_vuln_scan_agents_finished(host_manager) + elif operation == 'remove_package': + print("Removing package") package_data = operation_data['package'] - package_name = package_data[host_os_name] + package_name = package_data[host_os_name][host_os_arch] host_manager.remove_package(host, package_name, system) elif operation == 'check_agent_vulnerability': - + print("Check agent vuln") if operation_data['parameters']['alert_indexed']: + print("Check alert indexed") check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data']) if operation_data['parameters']['api']: + print("Check vuln in api response") check_vuln_alert_api(host_manager, operation_data['vulnerability_data']) if operation_data['parameters']['state_indice']: @@ -95,10 +120,9 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict ToDo: Implement the functionality. 
""" - indexer_alerts = get_indexer_values(host_manager, index='wazuh-alerts*') - - return indexer_alerts + indexer_alerts = get_indexer_values(host_manager) + pass def check_vuln_alert_api(host_manager: HostManager, vulnerability_data: Dict[str, Dict]): """ @@ -111,7 +135,48 @@ def check_vuln_alert_api(host_manager: HostManager, vulnerability_data: Dict[str ToDo: Implement the functionality. """ - pass + + api_vulns = get_agents_vulnerabilities(host_manager) + not_found_vuln = [] + + + + for agent in host_manager.get_group_hosts('agent'): + print("\n\n---------------------------------") + print(f"Agent {agent}") + + agent_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] + agent_arch_name = host_manager.get_host_variables(agent)['architecture'] + vulnerability_data_agent = vulnerability_data[agent_os_name][agent_arch_name] + current_vulns_agent = api_vulns[agent] + print(f"Vuln of agent {agent}: {vulnerability_data_agent}") + for vulnerability in vulnerability_data_agent: + print(f"Searching for {agent} and {vulnerability['CVE']}") + expected_vuln = { + 'status': 'VALID', + 'cve': vulnerability['CVE'] + } + found = False + for current_vulnerability in current_vulns_agent: + if all(current_vulnerability[key] == value for key, value in expected_vuln.items()): + found = True + print(f"Found {current_vulnerability}") + + if not found: + not_found_vuln.append({ + 'agent': agent, + 'cve': vulnerability['CVE'] + }) + print("\n\n---------------------------------") + + + print(f"No found {not_found_vuln}") + assert len(not_found_vuln) == 0 + + # Check alerts + + + def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager): diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index bfb3f7fe7f..eea2b70d3f 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -475,13 +475,14 @@ def 
install_package(self, host, url, system='ubuntu'): Supported values: 'windows', 'ubuntu', 'centos'. Returns: - Dict: Testinfra Ansible Response of the operation + Dict: Testinfra Ansible Response of the operation Example: host_manager.install_package('my_host', 'http://example.com/package.deb', system='ubuntu') """ result = False - + print(host) + print(url) if system =='windows': result = self.get_host(host).ansible("win_package", f"path={url} arguments=/S", check=False) elif system == 'ubuntu': @@ -493,6 +494,7 @@ def install_package(self, host, url, system='ubuntu'): if 'rc' in result and result['rc'] == 0 and result['changed'] == True: result = True + print(result) return result def get_master_ip(self): @@ -525,24 +527,23 @@ def remove_package(self, host, package_name, system): Supported values: 'windows', 'ubuntu', 'centos'. Returns: - Dict: Testinfra Ansible Response of the operation + Dict: Testinfra Ansible Response of the operation Example: host_manager.remove_package('my_host', 'my_package', system='ubuntu') """ result = False - - if system == 'windows': - result = self.get_host(host).ansible("win_package", f"path={package_name} state=absent arguments=/S", check=False) - elif system == 'ubuntu': - result = self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) - if result['changed'] == True and result['stderr'] == '': - result = True - elif system == 'centos': - result = self.get_host(host).ansible("yum", f"name={package_name} state=absent", check=False) - if 'rc' in result and result['rc'] == 0 and result['changed'] == True: - result = True - + os_name = self.get_host_variables(host)['os_name'] + if os_name == 'windows': + result = self.get_host(host).ansible("win_package", f"product_id={package_name} state=absent arguments=/S", check=False) + elif os_name == 'linux': + os = self.get_host_variables(host)['os'].split('_')[0] + if os == 'centos': + result = self.get_host(host).ansible("yum", f"name={package_name} 
state=absent", check=False) + elif os == 'ubuntu': + result = self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) + + print(result) return result def handle_wazuh_services(self, host, operation): @@ -600,4 +601,3 @@ def clean_environment(host_manager, target_files): """ for target in target_files: host_manager.clear_file(host=target[0], file_path=target[1]) - diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml new file mode 100644 index 0000000000..21af4c3600 --- /dev/null +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -0,0 +1,224 @@ +- case: "Installation of a vulnerable package" + id: "install_package" + description: "Installation of a vulnerable package" + preconditions: null + body: + tasks: + - operation: install_package + target: agent + package: + centos: + amd64: https://nmap.org/dist/nmap-6.46-1.x86_64.rpm + arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.17-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.17-2PGDG.rhel7.aarch64.rpm] + ubuntu: + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb + windows: + amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe + macos: + amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg + + - operation: check_agent_vulnerability + target: agent + parameters: + alert_indexed: False + api: True + alert: False + state_indice: False + vulnerability_data: + centos: + amd64: + - PACKAGE_NAME: "nmap" + PACKAGE_VERSION: "6.46-1" + CVE: CVE-2018-15173 + arm64v8: + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.17.2" + CVE: CVE-2023-39417 + ubuntu: + amd64: + - PACKAGE_NAME: "grafana" + 
PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 + arm64v8: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 + windows: + amd64: + - PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.6" + CVE: CVE-2023-47360 + macos: + amd64: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.0.1" + CVE: CVE-2022-21824 + # teardown: + # tasks: + # - operation: remove_package + # target: agent + # package: + # centos: + # amd64: rclone + # arm64v8: postgresql11* + # ubuntu: + # amd64: grafana* + # arm64v8: grafana* + # windows: + # amd64: vlc + # macos: + # amd64: node* + +# ---------------------------------------------------------------------------------- + +- case: "Updating a vulnerable package that remains vulnerable to the same CVE" + id: "update_vuln_package_vuln_remain" + description: "Updating a vulnerable package that remains vulnerable to the same CVE" + preconditions: null + body: + tasks: + - operation: install_package + target: agent + package: + centos: + amd64: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm + arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.18-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.18-1PGDG.rhel7.aarch64.rpm] + ubuntu: + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb + windows: + amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.8-win64.exe + macos: + amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg + - operation: check_agent_vulnerability + target: agent + parameters: + alert_indexed: False + api: True + alert: False + state_indice: False + vulnerability_data: + centos: + amd64: + - PACKAGE_NAME: "nmap" + PACKAGE_VERSION: "6.47-1" + CVE: CVE-2020-28924 + arm64v8: + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.17.2" + CVE: CVE-2023-39417 + ubuntu: + amd64: + - 
PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.6" + CVE: CVE-2023-2183 + arm64v8: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.6" + CVE: CVE-2023-2183 + windows: + amd64: + - PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.8" + CVE: CVE-2023-47360 + macos: + amd64: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.1.0" + CVE: CVE-2022-21824 + teardown: + tasks: + - operation: remove_package + target: agent + package: + windows: + amd64: vlc + +# --------------------------------------------------------------------- + +- case: "Updating a vulnerable package that becomes vulnerable to another CVE" + id: "updating_vulnerable_package_another_cve" + description: "Updating a vulnerable package that becomes vulnerable to another CVE" + preconditions: + tasks: + - operation: install_package + target: agent + package: + windows: + amd64: https://get.videolan.org/vlc/3.0.7/win32/vlc-3.0.7-win32.exe + - operation: check_agent_vulnerability + target: agent + parameters: + alert_indexed: False + api: True + alert: False + state_indice: False + vulnerability_data: + windows: + amd64: + - PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.8" + CVE: CVE-2023-47360 + body: + tasks: + - operation: install_package + target: agent + package: + centos: + amd64: https://nmap.org/dist/nmap-7.00-1.x86_64.rpm + arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.20-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.20-1PGDG.rhel7.aarch64.rpm] + ubuntu: + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb + windows: + amd64: https://get.videolan.org/vlc/3.0.7.1/win64/vlc-3.0.7.1-win64.exe + macos: + amd64: https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg + - operation: check_agent_vulnerability + target: agent + parameters: + alert_indexed: False + api: True + alert: 
False + state_indice: False + vulnerability_data: + centos: + amd64: + # Wrong package + - PACKAGE_NAME: "nmap" + PACKAGE_VERSION: "7.00" + CVE: CVE-2020-28924 + - PACKAGE_NAME: "nmap" + PACKAGE_VERSION: "7.00" + CVE: CVE-2018-1000161 + arm64v8: + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.20" + CVE: CVE-2023-39417 + ubuntu: + amd64: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "9.1.1" + CVE: CVE-2023-1387 + arm64v8: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "9.1.1" + CVE: CVE-2023-1387 + windows: + amd64: + - PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.7" + CVE: CVE-2019-13962 + STATUS: ABSENT + - PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.7.1" + CVE: CVE-2019-14437 + # Wrong package + macos: + amd64: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.1.0" + CVE: CVE-2022-21824 + +# ----------------------------------------------------------- \ No newline at end of file diff --git a/tests/end_to_end/test_vulnerability_detector/data/configurations/agent.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml similarity index 100% rename from tests/end_to_end/test_vulnerability_detector/data/configurations/agent.yaml rename to tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml diff --git a/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml similarity index 100% rename from tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml rename to tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index fbee965f15..7ed35ccc4e 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -22,6 +22,7 @@ def test_example(host_manager): import pytest from wazuh_testing.tools.system 
import HostManager +from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations @pytest.fixture(scope='session') @@ -44,3 +45,21 @@ def host_manager(request): manager = HostManager(inventory_path) return manager + + +@pytest.fixture(scope='function') +def setup(preconditions, teardown, host_manager): + """ + """ + if preconditions: + print("Configuyring preconditions") + launch_parallel_operations(preconditions['tasks'], host_manager) + + yield + + if teardown: + print("Configuring teardonw") + launch_parallel_operations(teardown, host_manager) + + # for host in host_manager.get_group_hosts('manager'): + # host_manager.truncate_file(host, '/var/ossec/logs/alerts/alerts.json') \ No newline at end of file diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 5e1831232c..1939fc7b83 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -16,9 +16,9 @@ Additionally, the tests ensure the consistency of these values. Tests: - - test_syscollector_initial_agent_scan: - Validates the initiation of Syscollector scans across all agents in the environment. - Subsequently, it ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. + - test_syscollector_initial_agent_scan: + Validates the initiation of Syscollector scans across all agents in the environment. + Subsequently, it ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. The Agent's Vulnerability Indexer index is expected to be updated with the detected vulnerabilities. 
Issue: https://github.com/wazuh/wazuh-qa/issues/4369 @@ -43,6 +43,7 @@ import os import pytest import logging +import yaml from typing import Generator, Dict, List @@ -55,6 +56,7 @@ from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.system import HostManager +from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations TIMEOUT_SYSCOLLECTOR_SCAN = 200 @@ -63,7 +65,7 @@ local_path = os.path.dirname(os.path.abspath(__file__)) current_dir = os.path.dirname(__file__) -configurations_dir = os.path.join(current_dir, "data", "configurations") +configurations_dir = os.path.join(current_dir, "configurations") configurations_paths = { 'manager': os.path.join(configurations_dir, 'manager.yaml'), 'agent': os.path.join(configurations_dir, 'agent.yaml') @@ -86,7 +88,7 @@ def load_vulnerability_detector_configurations(): def setup_vulnerability_tests(host_manager: HostManager) -> Generator: """Setup the vulnerability tests environment - Args: + Args: host_manager (HostManager): An instance of the HostManager class. """ logger.error("Init setup of environment") @@ -146,9 +148,9 @@ def check_vuln_state_index(host_manager: HostManager, agents_vulnerabilities_api @pytest.mark.filterwarnings('ignore::urllib3.exceptions.InsecureRequestWarning') def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): """ - description: Validates the initiation of Syscollector scans across all agents in the environment. + description: Validates the initiation of Syscollector scans across all agents in the environment. - This test ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. + This test ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. The Agent's Vulnerability Indexer index is expected to be updated with the detected vulnerabilities. 
tier: 0 @@ -231,3 +233,43 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): check_vuln_state_index(host_manager, agents_vuln_after_second_scan) +# ------------------------- + +cases = {} + +with open(os.path.join(current_dir, os.path.join('cases', 'test_vulnerability.yaml')), 'r') as cases_file: + cases = yaml.load(cases_file, Loader=yaml.FullLoader) + + + +complete_list = [ + ( + case['preconditions'] if 'preconditions' in case else None, + case['body'] if 'body' in case else None, + case['teardown'] if 'teardown' in case else None + ) + for case in cases +] + +dependencies = [None if 'depends' not in case else pytest.mark.depends(name=case['depend']) for case in cases] +list_ids = [ case['id'] for case in cases] + + +# @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) +@pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) +def test_vulnerability_detector_scans(preconditions, body, teardown, setup, host_manager): + + # Launch tests tasks + launch_parallel_operations(body['tasks'], host_manager) + + # # Check vulnerability + # agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) + + # Check alert in Wazuh Indexer + # monitoring_data = generate_monitoring_alerts_all_agent(host_manager, body['check_alerts']) + # expected_alerts = body['check_agent_alert_indexer'] + + # Check agent System state + # To Do + # results = monitoring_events(host_manager, monitoring_data) + # assert all(results.values()), f"Expected message was not triggered for some agents, {results}" From bcf316d4aeaa25f848f83dcc4e1953512a365005 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 24 Nov 2023 15:31:39 +0000 Subject: [PATCH 038/174] style: remove raw string from base_path --- deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py 
b/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py index af7de99c04..8a47616f32 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py @@ -14,7 +14,7 @@ base_path = { 'linux': '/var/ossec', - 'windows': r'C:\Program Files (x86)\ossec-agent', + 'windows': 'C:\Program Files (x86)\ossec-agent', 'macos': '/Library/Ossec' } From 4e4e379a3ce1c5ece3dff4f70a2451ab971719d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 24 Nov 2023 15:43:31 +0000 Subject: [PATCH 039/174] style: pep8 e2e monitoring module --- .../wazuh_testing/end_to_end/configuration.py | 37 ++++++++++++------- 1 file changed, 24 insertions(+), 13 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index 3c6b8b31bd..7b1aaa6133 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -2,13 +2,14 @@ Configurations handler for remote hosts. ---------------------------------------- -This module provides functions for configuring and managing host configurations using the HostManager class and related tools. +This module provides functions for configuring and managing host +configurations using the HostManager class and related tools. Functions: - backup_configurations: Backup configurations for all hosts in the specified host manager. - restore_backup: Restore configurations for all hosts in the specified host manager. - configure_environment: Configure the environment for all hosts in the specified host manager. - + Copyright (C) 2015, Wazuh Inc. Created by Wazuh, Inc. . @@ -22,7 +23,6 @@ from wazuh_testing.tools.system import HostManager - def backup_configurations(host_manager: HostManager) -> dict: """ Backup configurations for all hosts in the specified host manager. 
@@ -33,10 +33,15 @@ def backup_configurations(host_manager: HostManager) -> dict: Returns: dict: A dictionary mapping host names to their configurations. """ - return { - str(host): host_manager.get_file_content(str(host), configuration_filepath_os[host_manager.get_host_variables(host)['os_name']]) - for host in host_manager.get_group_hosts('all') - } + backup_configurations = {} + for host in host_manager.get_group_hosts('all'): + host_os_name = host_manager.get_host_variables(host)['os_name'] + configuration_filepath = configuration_filepath_os[host_os_name] + + backup_configurations[host] = host_manager.get_file_content(str(host), + configuration_filepath) + + return backup_configurations def restore_backup(host_manager: HostManager, backup_configurations: dict) -> None: @@ -47,8 +52,13 @@ def restore_backup(host_manager: HostManager, backup_configurations: dict) -> No host_manager: An instance of the HostManager class containing information about hosts. backup_configurations: A dictionary mapping host names to their configurations. 
""" - [host_manager.modify_file_content(str(host), configuration_filepath_os[host_manager.get_host_variables(host)['os_name']], backup_configurations[str(host)]) - for host in host_manager.get_group_hosts('all')] + backup_configurations = {} + + for host in host_manager.get_group_hosts('all'): + host_os_name = host_manager.get_host_variables(host)['os_name'] + configuration_filepath = configuration_filepath_os[host_os_name] + + host_manager.modify_file_content(host, configuration_filepath, backup_configurations[host]) def configure_host(host: str, host_configuration_role: dict, host_manager: HostManager) -> None: @@ -66,9 +76,10 @@ def configure_host(host: str, host_configuration_role: dict, host_manager: HostM host_groups = host_manager.get_host_groups(host) host_config = host_configuration_role.get('manager' if 'manager' in host_groups else 'agent', None) - + if not host_config: - raise TypeError(f"Host {host} configuration does not include a valid role (manager or agent): {host_configuration_role}") + raise TypeError(f"Host {host} configuration does not include a valid role (manager or agent):" + "{host_configuration_role}") current_config = host_manager.get_file_content(str(host), config_file_path) new_config = set_section_wazuh_conf(host_config[0].get('sections'), current_config.split("\n")) @@ -88,5 +99,5 @@ def configure_environment(host_manager: HostManager, configurations: dict) -> No configure_environment_parallel_map = [(host, configurations) for host in host_manager.get_group_hosts('all')] with ThreadPool() as pool: - pool.starmap(configure_host, [(host, config, host_manager) for host, config in configure_environment_parallel_map]) - + pool.starmap(configure_host, + [(host, config, host_manager) for host, config in configure_environment_parallel_map]) From 0fcfb6d8302a9fc7edd86818f7afeb8fce7122c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 24 Nov 2023 15:46:47 +0000 Subject: [PATCH 040/174] style: rename 
restore backup conf function --- .../wazuh_testing/end_to_end/configuration.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index 7b1aaa6133..6ae85187fa 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -44,21 +44,20 @@ def backup_configurations(host_manager: HostManager) -> dict: return backup_configurations -def restore_backup(host_manager: HostManager, backup_configurations: dict) -> None: +def restore_configuration(host_manager: HostManager, configuration: dict) -> None: """ Restore configurations for all hosts in the specified host manager. Args: host_manager: An instance of the HostManager class containing information about hosts. - backup_configurations: A dictionary mapping host names to their configurations. + configuration: A dictionary mapping host names to their configurations. 
""" - backup_configurations = {} for host in host_manager.get_group_hosts('all'): host_os_name = host_manager.get_host_variables(host)['os_name'] configuration_filepath = configuration_filepath_os[host_os_name] - host_manager.modify_file_content(host, configuration_filepath, backup_configurations[host]) + host_manager.modify_file_content(host, configuration_filepath, configuration[host]) def configure_host(host: str, host_configuration_role: dict, host_manager: HostManager) -> None: From e4f5036951461f1f288c95eaf503c3a6100857cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 24 Nov 2023 15:56:00 +0000 Subject: [PATCH 041/174] refac: make configure_host more readable --- .../wazuh_testing/end_to_end/configuration.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index 6ae85187fa..405a8e6163 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -81,10 +81,23 @@ def configure_host(host: str, host_configuration_role: dict, host_manager: HostM "{host_configuration_role}") current_config = host_manager.get_file_content(str(host), config_file_path) - new_config = set_section_wazuh_conf(host_config[0].get('sections'), current_config.split("\n")) - new_config = "\n".join(xml.dom.minidom.parseString(''.join(new_config)).toprettyxml().split("\n")[1:]) - host_manager.modify_file_content(str(host), config_file_path, new_config) + # Extract the sections from the first element of host_config + + sections = host_config[0].get('sections') + + # Combine the current hos configuration and the desired configuration + new_config_unformatted = set_section_wazuh_conf(sections, current_config.split("\n")) + + # Format new configuration + new_config_formatted_xml = 
xml.dom.minidom.parseString(''.join(new_config_unformatted)) + + # Get rid of the first no expected XML version line + new_config_formatted_xml = new_config_formatted_xml.toprettyxml().split("\n")[1:] + + final_configuration = "\n".join(new_config_formatted_xml) + + host_manager.modify_file_content(str(host), config_file_path, final_configuration) def configure_environment(host_manager: HostManager, configurations: dict) -> None: From aa9a96a9670b56b0139c9498530b90de66499089 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 09:36:45 +0000 Subject: [PATCH 042/174] docs: remove Elasticsearch references --- .../wazuh_testing/end_to_end/indexer_api.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index ffe5059a34..f15f546e44 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -2,10 +2,10 @@ Wazuh Elasticsearch Indexer Module. ----------------------------------- -This module provides functions to interact with the Wazuh Elasticsearch indexer API. +This module provides functions to interact with the Wazuh Indexer API. Functions: - - get_indexer_values: Retrieves values from the Elasticsearch indexer API. + - get_indexer_values: Retrieves values from the Indexer API. Copyright (C) 2015, Wazuh Inc. @@ -20,7 +20,9 @@ STATE_INDEX_NAME = 'wazuh-vulnerabilities-states' -def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, index: str = 'wazuh-alerts*') -> Dict: + +def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, + index: str = 'wazuh-alerts*') -> Dict: """ Get values from the Wazuh Elasticsearch indexer API. 
@@ -42,4 +44,3 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' response = requests.get(url=url, params={'pretty': 'true'}, json=query, verify=False, auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password'])) return response.json() - From 2f1f35f533530fc91a4bb72888e6605689178f3b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 09:38:23 +0000 Subject: [PATCH 043/174] docs: remove Elasticsearch from functions docstrings --- deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index f15f546e44..6e79904340 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -24,13 +24,13 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, index: str = 'wazuh-alerts*') -> Dict: """ - Get values from the Wazuh Elasticsearch indexer API. + Get values from the Wazuh Indexer API. Args: host_manager: An instance of the HostManager class containing information about hosts. - credentials (Optional): A dictionary containing the Elasticsearch credentials. Defaults to + credentials (Optional): A dictionary containing the Indexer credentials. Defaults to {'user': 'admin', 'password': 'changeme'}. - index (Optional): The Elasticsearch index name. Defaults to 'wazuh-alerts*'. + index (Optional): The Indexer index name. Defaults to 'wazuh-alerts*'. Returns: str: The response text from the indexer API. 
From a8c21e36cc2cc9218a75ffee8e43657b80c5eb9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 09:41:48 +0000 Subject: [PATCH 044/174] style: fix PEP8 style errors --- deps/wazuh_testing/wazuh_testing/end_to_end/logs.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index 05d34b302c..7bf04d475c 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -18,7 +18,7 @@ """ from wazuh_testing.end_to_end import logs_filepath_os -from wazuh_testing.tools.system import HostManager +from wazuh_testing.tools.system import HostManager def truncate_agents_logs(host_manager: HostManager) -> None: @@ -32,6 +32,7 @@ def truncate_agents_logs(host_manager: HostManager) -> None: host_os_name = host_manager.get_host_variables(agent)['os_name'] host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) + def truncate_managers_logs(host_manager: HostManager) -> None: """ Truncate logs for Wazuh managers. @@ -43,6 +44,7 @@ def truncate_managers_logs(host_manager: HostManager) -> None: host_os_name = host_manager.get_host_variables(manager)['os_name'] host_manager.truncate_file(manager, logs_filepath_os[host_os_name]) + def truncate_logs(host_manager: HostManager) -> None: """ Truncate logs for both Wazuh agents and managers. @@ -53,6 +55,7 @@ def truncate_logs(host_manager: HostManager) -> None: truncate_managers_logs(host_manager) truncate_agents_logs(host_manager) + def truncate_alerts(host_manager: HostManager) -> None: """ Truncate Wazuh alerts. 
@@ -62,4 +65,3 @@ def truncate_alerts(host_manager: HostManager) -> None: """ for manager in host_manager.get_group_hosts('manager'): host_manager.truncate_file(manager, '/var/ossec/logs/alerts/alerts.json') - From 8b456ef3aacf13772c90ca832005a51cca57de34 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 09:46:04 +0000 Subject: [PATCH 045/174] style: fix pep8 in monitoring module --- deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 4908196625..4d6f4d0133 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -48,7 +48,8 @@ def monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data monitoring_file_content += f"{host}:\n" for monitoring_event in data: string_limiter = "'" if '"' in monitoring_event.get("regex", "") else '"' - monitoring_file_content += f' - regex: {string_limiter}{monitoring_event.get("regex", "")}{string_limiter}\n' + monitoring_file_content += f' - regex: {string_limiter}{monitoring_event.get("regex", "")}{string_limiter}' + '\n' monitoring_file_content += f' file: {string_limiter}{monitoring_event.get("file", "")}{string_limiter}\n' monitoring_file_content += f' timeout: {monitoring_event.get("timeout", 0)}\n' @@ -74,7 +75,7 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict]): """ Monitor the specified elements on a host. - + Args: host_manager (HostManager): Host Manager to handle the environment host (str): The target host. 
From 300dbccdf263962b902104dc486f266c518774ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 09:57:07 +0000 Subject: [PATCH 046/174] refac: remove monitoring_events_host_monitoring function --- .../wazuh_testing/end_to_end/monitoring.py | 37 ------------------- 1 file changed, 37 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 4d6f4d0133..1faebd8728 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -6,7 +6,6 @@ This module provides functions for monitoring events, files, and alerts in a Wazuh environment. Functions: - - monitoring_events_host_monitoring: Monitor events on hosts using the HostMonitor. - monitoring_events_multihost: Monitor events on multiple hosts concurrently. - generate_monitoring_logs_all_agent: Generate monitoring data for logs on all agent hosts. - generate_monitoring_logs_manager: Generate monitoring data for logs on a specific manager host. @@ -18,52 +17,16 @@ This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ -import tempfile import re from time import sleep from typing import Dict, List from multiprocessing.pool import ThreadPool from wazuh_testing.end_to_end import logs_filepath_os -from wazuh_testing.tools.file import create_temp_file -from wazuh_testing.tools.monitoring import HostMonitor from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.tools.system import HostManager -def monitoring_events_host_monitoring(host_manager: HostManager, monitoring_data: Dict) -> Dict: - """Monitor events on hosts using the HostMonitor class. - - Args: - host_manager: An instance of the HostManager class containing information about hosts. - monitoring_data: A dictionary containing monitoring data for each host. 
- - Returns: - dict: Results of the monitoring process. - """ - monitoring_file_content = '' - results = {} - - for host, data in monitoring_data.items(): - monitoring_file_content += f"{host}:\n" - for monitoring_event in data: - string_limiter = "'" if '"' in monitoring_event.get("regex", "") else '"' - monitoring_file_content += f' - regex: {string_limiter}{monitoring_event.get("regex", "")}{string_limiter}' - '\n' - monitoring_file_content += f' file: {string_limiter}{monitoring_event.get("file", "")}{string_limiter}\n' - monitoring_file_content += f' timeout: {monitoring_event.get("timeout", 0)}\n' - - temp_file = create_temp_file(monitoring_file_content) - - temporal_directory = tempfile.TemporaryDirectory() - - HostMonitor(inventory_path=host_manager.get_inventory_path(), - messages_path=temp_file, - tmp_path=temporal_directory.name).run() - - return results - - def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict) -> None: """ Monitor events on multiple hosts concurrently. From fba8d1663b6691ab562bbcab91f8dcf2480ab711 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:01:29 +0000 Subject: [PATCH 047/174] feat: include interval parameters to monitoring function --- deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 1faebd8728..1ce025c47a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -35,7 +35,7 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict host_manager: An instance of the HostManager class containing information about hosts. monitoring_data: A dictionary containing monitoring data for each host. 
""" - def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict]): + def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int): """ Monitor the specified elements on a host. @@ -47,6 +47,7 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: Raises: TimeoutError: If no match is found within the specified timeout. """ + for element in monitoring_elements: regex, timeout, monitoring_file = element['regex'], element['timeout'], element['file'] current_timeout = 0 @@ -57,7 +58,8 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: if regex_match: break - sleep(5) + sleep(scan_interval) + current_timeout += 5 if not regex_match: From e14ce41116057668a529db04ad50c4617806698d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:09:25 +0000 Subject: [PATCH 048/174] style: fix pep8 style errors in regex module --- deps/wazuh_testing/wazuh_testing/end_to_end/regex.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index 549beb840a..5a0c3d3866 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -28,19 +28,21 @@ 'regex': '.*INFO: Evaluation finished.' 
}, 'syscollector_install_package_alert_yum': { - 'regex': '.*installed.*agent".*"name":"(\S+)".*Installed: (\S+).*?(\S+)', + 'regex': '.*installed.*agent".*"name":"(\\S+)".*Installed: (\\S+).*?(\\S+)', 'parameters': ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] }, 'syscollector_install_package_alert_apt': { - 'regex': '.*New dpkg \(Debian Package\) installed.*.*agent".*"name":"(\S+).*package":"(\S+)","arch":"amd64","version":"(\S+)"', + 'regex': '.*New dpkg \\(Debian Package\\) installed.*.*agent".*"name":"(\\S+).*package":"(\\S+)",' + '"arch":"amd64","version":"(\\S+)"', 'parameters': ['HOST_NAME', 'PACKAGE_NAME', 'PACKAGE_VERSION'] }, 'syscollector_upgrade_package_alert_yum': { - 'regex': '.*Yum package updated.*agent".*"name":"(\S+)".*Updated: (\S+).*?(\S+)', + 'regex': '.*Yum package updated.*agent".*"name":"(\\S+)".*Updated: (\\S+).*?(\\S+)', 'parameters': ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] }, 'vulnerability_alert': { - 'regex': '.*HOST_NAME.*package":.*name":"PACKAGE_NAME".*version":"PACKAGE_VERSION".*"architecture":"ARCHITECTURE.*"cve":"CVE"', + 'regex': '.*HOST_NAME.*package":.*name":"PACKAGE_NAME".*version":"PACKAGE_VERSION".*"' + 'architecture":"ARCHITECTURE.*"cve":"CVE"', 'parameters': ['HOST_NAME', 'CVE', 'PACKAGE_NAME', 'PACKAGE_VERSION', 'ARCHITECTURE'] } } @@ -74,5 +76,3 @@ def get_event_regex(event: Dict): expected_regex = expected_regex.replace(parameter, event['parameters'].get(parameter, '')) return expected_regex - - From d7468463ae7249d3abdaa88926d1ff75f29d76cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:10:59 +0000 Subject: [PATCH 049/174] style: include typing for get_regex function --- deps/wazuh_testing/wazuh_testing/end_to_end/regex.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index 5a0c3d3866..36dfba39e1 100644 --- 
a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -48,7 +48,7 @@ } -def get_event_regex(event: Dict): +def get_event_regex(event: Dict) -> str: """ Get the regex pattern for a specific event. From 33f6ff8216a3355636f6f04ee5acf4994f835d84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:19:38 +0000 Subject: [PATCH 050/174] style: pep8 for test vulnerability E2E --- .../test_vulnerability_detector.py | 28 +++++++++---------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 5e1831232c..b3d819368a 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -12,13 +12,13 @@ This module contains basic tests to ensure the proper functionality of the Vulnerability Detector. The tests validate that initial Syscollector scans, along with installation, removal, and uninstall operations, trigger Vulnerability Detector scans, generating the expected vulnerabilities. - The verification of vulnerabilities is conducted through the Wazuh Indexer, Agents' state index, and Wazuh API vulnerability endpoints. + The verification of vulnerabilities is conducted through Vulnerabilities Index and API endpoint Additionally, the tests ensure the consistency of these values. Tests: - - test_syscollector_initial_agent_scan: - Validates the initiation of Syscollector scans across all agents in the environment. - Subsequently, it ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. + - test_syscollector_initial_agent_scan: + Validates the initiation of Syscollector scans across all agents in the environment. 
+ Subsequently, it ensures that the Vulnerability Detector detects vulnerabilities within the environment. The Agent's Vulnerability Indexer index is expected to be updated with the detected vulnerabilities. Issue: https://github.com/wazuh/wazuh-qa/issues/4369 @@ -47,9 +47,9 @@ from typing import Generator, Dict, List from wazuh_testing.tools.configuration import load_configuration_template -from wazuh_testing.end_to_end.configuration import backup_configurations, restore_backup, configure_environment +from wazuh_testing.end_to_end.configuration import backup_configurations, restore_configuration, configure_environment from wazuh_testing.end_to_end.logs import truncate_agents_logs, truncate_managers_logs, truncate_logs -from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities +from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated, wait_until_vuln_scan_agents_finished from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events_multihost from wazuh_testing.end_to_end.regex import get_event_regex @@ -86,7 +86,7 @@ def load_vulnerability_detector_configurations(): def setup_vulnerability_tests(host_manager: HostManager) -> Generator: """Setup the vulnerability tests environment - Args: + Args: host_manager (HostManager): An instance of the HostManager class. 
""" logger.error("Init setup of environment") @@ -118,7 +118,7 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: yield logger.error("Restoring original configuration") - restore_backup(host_manager, hosts_configuration_backup) + restore_configuration(host_manager, hosts_configuration_backup) def check_vuln_state_index(host_manager: HostManager, agents_vulnerabilities_api_value: Dict[str, List[Dict]]): @@ -146,9 +146,9 @@ def check_vuln_state_index(host_manager: HostManager, agents_vulnerabilities_api @pytest.mark.filterwarnings('ignore::urllib3.exceptions.InsecureRequestWarning') def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): """ - description: Validates the initiation of Syscollector scans across all agents in the environment. + description: Validates the initiation of Syscollector scans across all agents in the environment. - This test ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. + This test ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. The Agent's Vulnerability Indexer index is expected to be updated with the detected vulnerabilities. 
tier: 0 @@ -165,7 +165,7 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): - Verify that syscollector scan is started after agent start - Verify that Vulnerability scan is performed for all the agent - Verify that vulnerabilities are generated for each agent (Check vulnerabilities using Wazuh API) - - Verify that Agent's Vulnerabilities index is updated with the agent vulnerabilities, being conssitent with the API results + - Verify that Agent's Vulnerabilities index is updated and is conssitent with the API results - Verify that second scan is performed in expected timeframe - Verify that no new vulnearbilities are detected since the first scan @@ -180,7 +180,7 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): monitoring_data = generate_monitoring_logs_all_agent(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) monitoring_events_multihost(host_manager, monitoring_data) @@ -211,7 +211,7 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): monitoring_data = generate_monitoring_logs_all_agent(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) monitoring_events_multihost(host_manager, monitoring_data) @@ -229,5 +229,3 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # Compare agents_vuln_after_second_scan with state_index_content logger.critical("Comparing state and API vulnerabilities for each agent") check_vuln_state_index(host_manager, agents_vuln_after_second_scan) - - From 9aa7fa5a8c25e5d5a80c1063f76eba0051a6d175 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:24:24 +0000 Subject: [PATCH 051/174] style: rename manager to host_manager Co-authored-by: Julia Magan --- tests/end_to_end/test_vulnerability_detector/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index fbee965f15..989ae7fa73 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -41,6 +41,6 @@ def host_manager(request): ValueError: If the specified inventory path is invalid or not provided. """ inventory_path = request.config.getoption('--inventory-path') - manager = HostManager(inventory_path) + host_manager = HostManager(inventory_path) - return manager + return host_manager From d50fda825e6f8c7c4a7afc08514c4cbbc062dedb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:34:18 +0000 Subject: [PATCH 052/174] refac: remove unnused yaml regex file Co-authored-by: Julia Magan --- .../end_to_end/regexes/regex.yaml | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/regexes/regex.yaml diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regexes/regex.yaml b/deps/wazuh_testing/wazuh_testing/end_to_end/regexes/regex.yaml deleted file mode 100644 index fdce5052d2..0000000000 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regexes/regex.yaml +++ /dev/null @@ -1,18 +0,0 @@ -### Syscollector Events -syscollector_scan_start: - regex: ".*INFO: Starting evaluation." - -syscollector_scan_end: - regex: ".*INFO: Starting evaluation." 
- -syscollector_install_package_alert_yum: - regex: '.*installed.*agent".*"name":"HOST_NAME".*Installed: PACKAGE_NAME.*PACKAGE_VERSION' - parameters: ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] - -syscollector_install_package_alert_apt: - regex: '.*New dpkg \(Debian Package\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION"' - parameters: ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] - -syscollector_upgrade_package_alert_yum: - regex: '.*Yum package updated.*agent".*"name":"HOST_NAME".*Updated: PACKAGE_NAME.*PACKAGE_VERSION' - parameters: ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME'] From a6cc0e55b7bb674d66f2d809b8fc83b5efa0784d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:34:50 +0000 Subject: [PATCH 053/174] style: renamed some macos configuration tasks Co-authored-by: Julia Magan --- provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml index 7ad4242218..3fbf560a03 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml @@ -7,7 +7,7 @@ - include_tasks: "installation_from_custom_packages.yml" when: wazuh_custom_packages_installation_agent_enabled -- name: macOS | Installing agent configuration (ossec.conf) +- name: macOS | Set agent configuration (ossec.conf) template: src: var-ossec-etc-ossec-agent.conf.j2 dest: "{{ macos_wazuh_dir }}/etc/ossec.conf" @@ -26,7 +26,7 @@ tags: - config -- name: macOS | Installing local_internal_options.conf +- name: macOS | Set local_internal_options.conf template: src: var-ossec-etc-local-internal-options.conf.j2 dest: "{{ macos_wazuh_dir }}/etc/local_internal_options.conf" @@ -51,3 +51,4 @@ - authd_pass | length > 0 tags: - 
config + From 063334fbc8cb263a2d70ae266b372aa620dea86c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:35:27 +0000 Subject: [PATCH 054/174] style: added final empty line to ansible playbook Co-authored-by: Julia Magan --- provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml index 56d5a281eb..ca898e3512 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml @@ -6,4 +6,5 @@ win_service: name=WazuhSvc start_mode=auto state=restarted - name: MacOS | restart wazuh-agent - ansible.builtin.shell: "{{ wazuh_macos_dir }}/bin/wazuh-control restart" \ No newline at end of file + ansible.builtin.shell: "{{ wazuh_macos_dir }}/bin/wazuh-control restart" + From 310437f8ba546ca7c070a3c222a24500b956fcb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:45:03 +0000 Subject: [PATCH 055/174] style: fix pep8 style errors of remote handler module Co-authored-by: Julia Magan --- .../wazuh_testing/end_to_end/remote_operations_handler.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index ab9f63904c..faa8f71ccc 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -2,7 +2,10 @@ Remote Operations Module. ------------------------- -This module provides functions for launching remote operations on hosts and managing vulnerability checks. 
It utilizes the Wazuh testing framework, including the HostManager class for handling remote hosts and various tools for indexer API interactions. +This module provides functions for launching remote operations on hosts and managing vulnerability checks. + +It utilizes the Wazuh testing framework, including the HostManager class for handling +remote hosts and various tools for indexer API interactions. Functions: - launch_remote_operation: Launch a remote operation on a specified host. @@ -25,7 +28,7 @@ from wazuh_testing.tools.system import HostManager -def launch_remote_operation(host: str, operation_data: Dict[str,Dict], host_manager: HostManager): +def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): """ Launch a remote operation on the specified host. From b8212e32a52be121af9f6e3bc76b1a6df3803bb6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:48:47 +0000 Subject: [PATCH 056/174] fix: renamed todo function to check vd consistency Co-authored-by: Julia Magan --- .../test_vulnerability_detector.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index b3d819368a..8a8d406d5d 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -37,7 +37,7 @@ - tier0 ToDo: - - check_vuln_state_index: Function to ensure the consitency of the agent's vulnerabilities + - check_vuln_state_consistency: Function to ensure the consitency of the agent's vulnerabilities - Check if wait_until_vuln_scan_agents_finished function works as expected for the second scan """ import os @@ -121,7 +121,7 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: 
restore_configuration(host_manager, hosts_configuration_backup) -def check_vuln_state_index(host_manager: HostManager, agents_vulnerabilities_api_value: Dict[str, List[Dict]]): +def check_vuln_state_consistency(host_manager: HostManager, agents_vulnerabilities_api_value: Dict[str, List[Dict]]): """Check the consistency of the vulnerabilities included in the indexer. This function ensures that the vulnerabilities gathered from the Wazuh API are included in the @@ -200,7 +200,7 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # Compare agents_vuln_before_second_scan with state_index_content logger.critical("Comparing state and API vulnerabilities for each agent") - check_vuln_state_index(host_manager, agents_vuln_before_second_scan) + check_vuln_state_consistency(host_manager, agents_vuln_before_second_scan) # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly logger.critical("Truncating manager logs") @@ -228,4 +228,4 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # Compare agents_vuln_after_second_scan with state_index_content logger.critical("Comparing state and API vulnerabilities for each agent") - check_vuln_state_index(host_manager, agents_vuln_after_second_scan) + check_vuln_state_consistency(host_manager, agents_vuln_after_second_scan) From 64909e7adf8b34a37c57e578e64f654d70ef3d24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:54:25 +0000 Subject: [PATCH 057/174] refac: rename basic cases e2e environment name Co-authored-by: Julia Magan --- .../{end_to_end_environment.yaml => e2e_basic_cases.yaml} | 0 tests/system/provisioning/environment_test_dictionary.json | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename provisioning/environments/{end_to_end_environment.yaml => e2e_basic_cases.yaml} (100%) diff --git a/provisioning/environments/end_to_end_environment.yaml 
b/provisioning/environments/e2e_basic_cases.yaml similarity index 100% rename from provisioning/environments/end_to_end_environment.yaml rename to provisioning/environments/e2e_basic_cases.yaml diff --git a/tests/system/provisioning/environment_test_dictionary.json b/tests/system/provisioning/environment_test_dictionary.json index 3de2bfed9d..b098a53aba 100644 --- a/tests/system/provisioning/environment_test_dictionary.json +++ b/tests/system/provisioning/environment_test_dictionary.json @@ -43,7 +43,7 @@ "system/test_cluster/test_agent_groups/test_groups_sync_default.py", "system/test_cluster/test_agent_groups/test_groups_sync_time.py" ], - "end_to_end_environment": [ + "e2e_basic_cases": [ "test_basic_cases/test_audit/test_audit.py", "test_basic_cases/test_aws_infrastructure_monitoring/test_aws_infrastructure_monitoring.py", "test_basic_cases/test_brute_force/test_brute_force_rdp/test_brute_force_rdp.py", From cbb9a7aed2fed76378dee912c052cde15ec6f4a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 10:55:54 +0000 Subject: [PATCH 058/174] style: fix imports in VD tests Co-authored-by: Julia Magan --- .../test_vulnerability_detector/test_vulnerability_detector.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 8a8d406d5d..8c8587411e 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -43,7 +43,6 @@ import os import pytest import logging - from typing import Generator, Dict, List from wazuh_testing.tools.configuration import load_configuration_template From 4141f0ef9e5dc9e8772e5a53c408adca1b0befa3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 11:08:55 +0000 Subject: [PATCH 059/174] 
style: remove extra white space in ansible task --- provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml index ca898e3512..9b96391f3a 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml @@ -7,4 +7,3 @@ - name: MacOS | restart wazuh-agent ansible.builtin.shell: "{{ wazuh_macos_dir }}/bin/wazuh-control restart" - From e0a7933b090ab581352ed7785e3a725ad3a6cd65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 11:26:52 +0000 Subject: [PATCH 060/174] refac: logs E2E module Co-authored-by Julia Magan --- .../wazuh_testing/end_to_end/logs.py | 63 ++++++------------- .../test_vulnerability_detector.py | 10 +-- 2 files changed, 23 insertions(+), 50 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index 7bf04d475c..ebf0f5940a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -6,62 +6,35 @@ This module provides functions for truncating logs and alerts for Wazuh agents and managers. Functions: - - truncate_agents_logs: Truncate logs for Wazuh agents. - - truncate_managers_logs: Truncate logs for Wazuh managers. - - truncate_logs: Truncate logs for both Wazuh agents and managers. - - truncate_alerts: Truncate Wazuh alerts. + - truncate_remote_host_group_files: Truncate the specified files in all the host of a group Copyright (C) 2015, Wazuh Inc. Created by Wazuh, Inc. . 
This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ - +from wazuh_testing import ALERTS_JSON_PATH from wazuh_testing.end_to_end import logs_filepath_os from wazuh_testing.tools.system import HostManager -def truncate_agents_logs(host_manager: HostManager) -> None: - """ - Truncate logs for Wazuh agents. - - Args: - host_manager: An instance of the HostManager class containing information about hosts. - """ - for agent in host_manager.get_group_hosts('agent'): - host_os_name = host_manager.get_host_variables(agent)['os_name'] - host_manager.truncate_file(agent, logs_filepath_os[host_os_name]) - - -def truncate_managers_logs(host_manager: HostManager) -> None: +def truncate_remote_host_group_files(host_manager: HostManager, host_group: str, + file_to_truncate: str='logs'): """ - Truncate logs for Wazuh managers. + Truncate log or alert files on remote hosts in a specified host group. - Args: - host_manager: An instance of the HostManager class containing information about hosts. + Parameters: + - host_manager (HostManager): An instance of the HostManager class for managing remote hosts. + - host_group (str): The name of the host group where the files will be truncated. + - file_to_truncate (str, optional): The type of file to truncate. Default is 'logs'. + Possible values are 'logs' for log files or 'alerts' for alert files. 
""" - for manager in host_manager.get_group_hosts('manager'): - host_os_name = host_manager.get_host_variables(manager)['os_name'] - host_manager.truncate_file(manager, logs_filepath_os[host_os_name]) + for host in host_manager.get_group_hosts(host_group): + log_file_path = None + if file_to_truncate == 'logs': + host_os_name = host_manager.get_host_variables(host)['os_name'] + log_file_path = logs_filepath_os[host_os_name] + elif file_to_truncate == 'alerts': + log_file_path = ALERTS_JSON_PATH - -def truncate_logs(host_manager: HostManager) -> None: - """ - Truncate logs for both Wazuh agents and managers. - - Args: - host_manager: An instance of the HostManager class containing information about hosts. - """ - truncate_managers_logs(host_manager) - truncate_agents_logs(host_manager) - - -def truncate_alerts(host_manager: HostManager) -> None: - """ - Truncate Wazuh alerts. - - Args: - host_manager: An instance of the HostManager class containing information about hosts. - """ - for manager in host_manager.get_group_hosts('manager'): - host_manager.truncate_file(manager, '/var/ossec/logs/alerts/alerts.json') + host_manager.truncate_file(host, log_file_path) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 8c8587411e..2148990c0b 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -45,14 +45,14 @@ import logging from typing import Generator, Dict, List -from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.end_to_end.configuration import backup_configurations, restore_configuration, configure_environment -from wazuh_testing.end_to_end.logs import truncate_agents_logs, truncate_managers_logs, truncate_logs +from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files from 
wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated, wait_until_vuln_scan_agents_finished from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events_multihost from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.end_to_end.indexer_api import get_indexer_values +from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.tools.system import HostManager @@ -108,7 +108,7 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: # Truncate alerts and logs of managers and agents logger.error("Truncate managers and agents logs") - truncate_logs(host_manager) + truncate_remote_host_group_files(host_manager, 'all', 'logs') # Start agents logger.error("Starting agents") @@ -185,7 +185,7 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # Truncate agents logs to detect second scan logger.critical("Truncating agent's logs") - truncate_agents_logs(host_manager) + truncate_remote_host_group_files(host_manager, 'agent', 'logs') # Wait until all agents has been scanned logger.critical("Waiting until agent's VD scan is over") @@ -203,7 +203,7 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly logger.critical("Truncating manager logs") - truncate_managers_logs(host_manager) + truncate_remote_host_group_files(host_manager, 'manager', 'logs') logger.critical("Monitoring Second Syscollector scan") # The Agent's syscollector second scan is run From feb16469d0897ad3404c7d744d43bc71d244a40d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 11:47:06 +0000 Subject: [PATCH 061/174] fix: truncate logs after VD tests module --- .../test_vulnerability_detector.py | 9 +++++++++ 1 file changed, 9 
insertions(+) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 2148990c0b..9dcac45fd0 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -116,9 +116,18 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: yield + + # Truncate alerts and logs of managers and agents + logger.error("Truncate managers and agents logs") + truncate_remote_host_group_files(host_manager, 'all', 'logs') + logger.error("Restoring original configuration") restore_configuration(host_manager, hosts_configuration_backup) + logger.error("Restarting environment") + host_manager.control_environment('restart', ['agent']) + host_manager.control_environment('restart', ['manager']) + def check_vuln_state_consistency(host_manager: HostManager, agents_vulnerabilities_api_value: Dict[str, List[Dict]]): """Check the consistency of the vulnerabilities included in the indexer. 
From ff3c3374b732b99479a1beae594afe92464ace66 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 11:50:24 +0000 Subject: [PATCH 062/174] refac: include TIMEOUT_SYSCOLLECTOR to syscollector module Co-authored-by Julia Magan --- .../wazuh_testing/modules/syscollector/__init__.py | 1 + .../test_vulnerability_detector/test_vulnerability_detector.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py index e69de29bb2..a746f4bd76 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py @@ -0,0 +1 @@ +TIMEOUT_SYSCOLLECTOR_SCAN = 200 diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 9dcac45fd0..1101791ab0 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -54,9 +54,9 @@ from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.tools.system import HostManager +from wazuh_testing.modules.syscollector import TIMEOUT_SYSCOLLECTOR_SCAN -TIMEOUT_SYSCOLLECTOR_SCAN = 200 logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) From 505cd9d006e1b65887c8a958dbf0f2906304a24f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 11:54:36 +0000 Subject: [PATCH 063/174] refac: unnused environment file --- .../vulnerability_detector_tests.yaml | 32 ------------------- 1 file changed, 32 deletions(-) delete mode 100644 provisioning/environments/vulnerability_detector_tests.yaml diff --git 
a/provisioning/environments/vulnerability_detector_tests.yaml b/provisioning/environments/vulnerability_detector_tests.yaml deleted file mode 100644 index b236164db9..0000000000 --- a/provisioning/environments/vulnerability_detector_tests.yaml +++ /dev/null @@ -1,32 +0,0 @@ -manager1: - roles: [manager, filebeat, indexer] - os: ubuntu_22 - type: master -resources: - cpu: 4 - memory: 8192 - -manager2: - roles: [manager, filebeat] - os: ubuntu_22 - type: worker - -agent1: - roles: [agent] - os: centos_7 - manager: manager1 - -agent2: - roles: [agent] - os: windows_11 - manager: manager1 - -agent3: - roles: [agent] - os: macos_1015 - manager: manager2 - -agent4: - roles: [agent] - os: ubuntu_22 - manager: manager2 From 372138c2005beb6fd763e7061230c2ddc9c074e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 11:56:04 +0000 Subject: [PATCH 064/174] fix: add break in get master ip --- deps/wazuh_testing/wazuh_testing/tools/system.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index bfb3f7fe7f..f32e72c6b5 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -475,7 +475,7 @@ def install_package(self, host, url, system='ubuntu'): Supported values: 'windows', 'ubuntu', 'centos'. Returns: - Dict: Testinfra Ansible Response of the operation + Dict: Testinfra Ansible Response of the operation Example: host_manager.install_package('my_host', 'http://example.com/package.deb', system='ubuntu') @@ -511,6 +511,7 @@ def get_master_ip(self): if 'type' in self.get_host_variables(manager) and \ self.get_host_variables(manager)['type'] == 'master': master_ip = self.get_host_variables(manager)['ip'] + break return master_ip @@ -525,7 +526,7 @@ def remove_package(self, host, package_name, system): Supported values: 'windows', 'ubuntu', 'centos'. 
Returns: - Dict: Testinfra Ansible Response of the operation + Dict: Testinfra Ansible Response of the operation Example: host_manager.remove_package('my_host', 'my_package', system='ubuntu') @@ -600,4 +601,3 @@ def clean_environment(host_manager, target_files): """ for target in target_files: host_manager.clear_file(host=target[0], file_path=target[1]) - From cce910b38a8249ce2691c44e789458c308b24687 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 11:57:41 +0000 Subject: [PATCH 065/174] refac: install package from system module Co-authored-by Julia Magan --- deps/wazuh_testing/wazuh_testing/tools/system.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index f32e72c6b5..f934154cad 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -486,11 +486,11 @@ def install_package(self, host, url, system='ubuntu'): result = self.get_host(host).ansible("win_package", f"path={url} arguments=/S", check=False) elif system == 'ubuntu': result = self.get_host(host).ansible("apt", f"deb={url}", check=False) - if result['changed'] == True and result['stderr'] == '': + if result['changed'] and result['stderr'] == '': result = True elif system == 'centos': result = self.get_host(host).ansible("yum", f"name={url} state=present sslverify=false disable_gpg_check=True", check=False) - if 'rc' in result and result['rc'] == 0 and result['changed'] == True: + if 'rc' in result and result['rc'] == 0 and result['changed']: result = True return result From 5aac515d1e4fa8eadec271b391dbd9363efa6969 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 12:00:21 +0000 Subject: [PATCH 066/174] refac: move file host manager Co-authored-by Julia Magan --- deps/wazuh_testing/wazuh_testing/tools/system.py | 11 
+++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index f934154cad..c85e028de8 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -147,16 +147,11 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o system = 'linux' result = None - if 'os_name' in self.get_host_variables(host): - host_os_name = self.get_host_variables(host)['os_name'] - if host_os_name == 'windows': - system = 'windows' - - if system == 'linux': + if self.get_host_variables(host)['os_name'] == 'windows': + result = self.get_host(host).ansible("ansible.windows.win_copy", f"src='{src_path}' dest='{dest_path}'", check=check) + else: result = self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0644", check=check) - else: - result = self.get_host(host).ansible("ansible.windows.win_copy", f"src='{src_path}' dest='{dest_path}'", check=check) return result From f213e5be52cab38448c60cc91f3e0e2dfcf266d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 12:02:55 +0000 Subject: [PATCH 067/174] fix: move_file preserve by defaul the permissions Co-authored-by Julia Magan --- deps/wazuh_testing/wazuh_testing/tools/system.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index c85e028de8..2c9e423821 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -150,7 +150,7 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o if self.get_host_variables(host)['os_name'] == 'windows': result = self.get_host(host).ansible("ansible.windows.win_copy", f"src='{src_path}' dest='{dest_path}'", 
check=check) else: - result = self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0644", + result = self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=preserve", check=check) return result From 9fa5a670a78a3911b03970232f4f595a17c29e7f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 12:05:56 +0000 Subject: [PATCH 068/174] refac: remove unnused var --- deps/wazuh_testing/wazuh_testing/tools/system.py | 1 - 1 file changed, 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 2c9e423821..3a8e1aef0b 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -144,7 +144,6 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o dest_path (str): Destination path check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied. 
""" - system = 'linux' result = None if self.get_host_variables(host)['os_name'] == 'windows': From 5e4f1ab51da7394caf3a980ad5239b7bc133b697 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 27 Nov 2023 12:06:27 +0000 Subject: [PATCH 069/174] refac: imports in waiters module Co-authored-by Julia Magan --- deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index 83019fb3c8..ee29749743 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -19,10 +19,7 @@ This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ -from wazuh_testing.end_to_end.monitoring import ( - generate_monitoring_logs_manager, - monitoring_events_multihost -) +from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_manager, monitoring_events_multihost from wazuh_testing.end_to_end.wazuh_api import get_agents_id from wazuh_testing.tools.system import HostManager From dfa60712ac20ac8cf98808434d6834d98fef7b82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 28 Nov 2023 06:40:15 +0000 Subject: [PATCH 070/174] fix: remove VD from template --- .../wazuh_testing/end_to_end/monitoring.py | 2 +- .../end_to_end/remote_operations_handler.py | 57 +++- .../wazuh_testing/tools/system.py | 2 +- .../var-ossec-etc-ossec-server.conf.j2 | 32 -- .../cases/test_vulnerability.yaml | 298 ++++++++++-------- .../test_vulnerability_detector.py | 9 +- 6 files changed, 219 insertions(+), 181 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 1ce025c47a..f82260b32d 100644 --- 
a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -35,7 +35,7 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict host_manager: An instance of the HostManager class containing information about hosts. monitoring_data: A dictionary containing monitoring data for each host. """ - def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int): + def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 5): """ Monitor the specified elements on a host. diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 997af7952f..2d12eb2343 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -30,7 +30,7 @@ from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events_multihost from wazuh_testing.end_to_end.waiters import wait_until_vuln_scan_agents_finished from wazuh_testing.end_to_end.regex import get_event_regex - +from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): """ @@ -60,8 +60,16 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man print("Installing package") package_data = operation_data['package'] package_url = package_data[host_os_name][host_os_arch] - host_manager.install_package(host, package_url, system) + + if isinstance(package_url, list): + for package in package_url: + host_manager.install_package(host, package, system) + else: + host_manager.install_package(host, package_url, system) + + TIMEOUT_SYSCOLLECTOR_SCAN = 60 + 
truncate_remote_host_group_files(host_manager, 'agent', 'logs') # Wait until syscollector monitoring_data = generate_monitoring_logs_all_agent(host_manager, @@ -71,16 +79,31 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man monitoring_events_multihost(host_manager, monitoring_data) + truncate_remote_host_group_files(host_manager, 'manager', 'logs') # Wait until VD scan wait_until_vuln_scan_agents_finished(host_manager) - elif operation == 'remove_package': - print("Removing package") package_data = operation_data['package'] package_name = package_data[host_os_name][host_os_arch] host_manager.remove_package(host, package_name, system) + TIMEOUT_SYSCOLLECTOR_SCAN = 60 + + truncate_remote_host_group_files(host_manager, 'agent', 'logs') + # Wait until syscollector + monitoring_data = generate_monitoring_logs_all_agent(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) + + monitoring_events_multihost(host_manager, monitoring_data) + + truncate_remote_host_group_files(host_manager, 'manager', 'logs') + + # Wait until VD scan + wait_until_vuln_scan_agents_finished(host_manager) + elif operation == 'check_agent_vulnerability': print("Check agent vuln") if operation_data['parameters']['alert_indexed']: @@ -89,7 +112,8 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man if operation_data['parameters']['api']: print("Check vuln in api response") - check_vuln_alert_api(host_manager, operation_data['vulnerability_data']) + check_vuln_alert_api(host_manager, operation_data['vulnerability_data'], + operation_data['parameters'].get('state', True)) if operation_data['parameters']['state_indice']: check_vuln_state_index(host_manager, operation_data['vulnerability_data']) @@ -127,7 +151,7 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict pass -def 
check_vuln_alert_api(host_manager: HostManager, vulnerability_data: Dict[str, Dict]): +def check_vuln_alert_api(host_manager: HostManager, vulnerability_data: Dict[str, Dict], state=True): """ Check vulnerability alerts via API for a host. @@ -141,8 +165,7 @@ def check_vuln_alert_api(host_manager: HostManager, vulnerability_data: Dict[str api_vulns = get_agents_vulnerabilities(host_manager) not_found_vuln = [] - - + found_vuln = [] for agent in host_manager.get_group_hosts('agent'): print("\n\n---------------------------------") @@ -164,6 +187,10 @@ def check_vuln_alert_api(host_manager: HostManager, vulnerability_data: Dict[str if all(current_vulnerability[key] == value for key, value in expected_vuln.items()): found = True print(f"Found {current_vulnerability}") + found_vuln.append({ + 'agent': agent, + 'cve': vulnerability['CVE'] + }) if not found: not_found_vuln.append({ @@ -172,14 +199,12 @@ def check_vuln_alert_api(host_manager: HostManager, vulnerability_data: Dict[str }) print("\n\n---------------------------------") - - print(f"No found {not_found_vuln}") - assert len(not_found_vuln) == 0 - - # Check alerts - - - + if state: + print(f"No found {not_found_vuln}") + assert len(not_found_vuln) == 0 + else: + print(f"Found unexpected vulnerabilities {found_vuln}") + assert len(found_vuln) == 0 def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager): diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 148416ee27..d1061f443c 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -530,7 +530,7 @@ def remove_package(self, host, package_name, system): result = False os_name = self.get_host_variables(host)['os_name'] if os_name == 'windows': - result = self.get_host(host).ansible("win_package", f"product_id={package_name} state=absent arguments=/S", check=False) + result = 
self.get_host(host).ansible("win_command", f"& '{package_name}' /S", check=False) elif os_name == 'linux': os = self.get_host_variables(host)['os'].split('_')[0] if os == 'centos': diff --git a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 index cf87a44cc8..ff4d8cc3cb 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 +++ b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 @@ -258,38 +258,6 @@ {% endif %} - - {% if wazuh_manager_config.vulnerability_detector.enabled is defined %} - {{ wazuh_manager_config.vulnerability_detector.enabled }} - {% endif %} - {% if wazuh_manager_config.vulnerability_detector.interval is defined %} - {{ wazuh_manager_config.vulnerability_detector.interval }} - {% endif %} - {% if wazuh_manager_config.vulnerability_detector.run_on_start is defined %} - {{ wazuh_manager_config.vulnerability_detector.run_on_start }} - {% endif %} - {% if wazuh_manager_config.vulnerability_detector.providers is defined %} - {% for provider_ in wazuh_manager_config.vulnerability_detector.providers %} - - {% if provider_.enabled is defined %} - {{ provider_.enabled }} - {% endif %} - {% if provider_.os is defined %} - {% for os_ in provider_.os %} - {{ os_ }} - {% endfor %} - {% endif %} - {% if provider_.update_from_year is defined %} - {{ provider_.update_from_year }} - {% endif %} - {% if provider_.update_interval is defined %} - {{ provider_.update_interval }} - {% endif %} - - {% endfor %} - {% endif %} - - {{ wazuh_manager_config.syscheck.disable }} diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 21af4c3600..69b8bff171 100644 --- 
a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -46,7 +46,7 @@ CVE: CVE-2023-2183 windows: amd64: - - PACKAGE_NAME: "vlc" + - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" PACKAGE_VERSION: "3.0.6" CVE: CVE-2023-47360 macos: @@ -54,43 +54,27 @@ - PACKAGE_NAME: "node" PACKAGE_VERSION: "17.0.1" CVE: CVE-2022-21824 - # teardown: - # tasks: - # - operation: remove_package - # target: agent - # package: - # centos: - # amd64: rclone - # arm64v8: postgresql11* - # ubuntu: - # amd64: grafana* - # arm64v8: grafana* - # windows: - # amd64: vlc - # macos: - # amd64: node* - -# ---------------------------------------------------------------------------------- -- case: "Updating a vulnerable package that remains vulnerable to the same CVE" - id: "update_vuln_package_vuln_remain" - description: "Updating a vulnerable package that remains vulnerable to the same CVE" +- case: "Removal of vulnerable package" + id: "remove_vulnerable_package" + description: "Installation of a vulnerable package" preconditions: null + depend: "install_package" body: tasks: - - operation: install_package + - operation: remove_package target: agent package: centos: - amd64: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm - arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.18-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.18-1PGDG.rhel7.aarch64.rpm] + amd64: nmap + arm64v8: postgresql11* ubuntu: - amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb + amd64: grafana* + arm64v8: grafana* windows: - amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.8-win64.exe + amd64: vlc macos: - amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg + 
amd64: node* - operation: check_agent_vulnerability target: agent parameters: @@ -98,12 +82,13 @@ api: True alert: False state_indice: False + state: False vulnerability_data: centos: amd64: - PACKAGE_NAME: "nmap" - PACKAGE_VERSION: "6.47-1" - CVE: CVE-2020-28924 + PACKAGE_VERSION: "6.46-1" + CVE: CVE-2018-15173 arm64v8: - PACKAGE_NAME: "postgresql11" PACKAGE_VERSION: "11.17.2" @@ -111,114 +96,175 @@ ubuntu: amd64: - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.6" + PACKAGE_VERSION: "8.5.5" CVE: CVE-2023-2183 arm64v8: - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.6" + PACKAGE_VERSION: "8.5.5" CVE: CVE-2023-2183 windows: amd64: - PACKAGE_NAME: "vlc" - PACKAGE_VERSION: "3.0.8" + PACKAGE_VERSION: "3.0.6" CVE: CVE-2023-47360 macos: amd64: - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.1.0" + PACKAGE_VERSION: "17.0.1" CVE: CVE-2022-21824 - teardown: - tasks: - - operation: remove_package - target: agent - package: - windows: - amd64: vlc -# --------------------------------------------------------------------- -- case: "Updating a vulnerable package that becomes vulnerable to another CVE" - id: "updating_vulnerable_package_another_cve" - description: "Updating a vulnerable package that becomes vulnerable to another CVE" - preconditions: - tasks: - - operation: install_package - target: agent - package: - windows: - amd64: https://get.videolan.org/vlc/3.0.7/win32/vlc-3.0.7-win32.exe - - operation: check_agent_vulnerability - target: agent - parameters: - alert_indexed: False - api: True - alert: False - state_indice: False - vulnerability_data: - windows: - amd64: - - PACKAGE_NAME: "vlc" - PACKAGE_VERSION: "3.0.8" - CVE: CVE-2023-47360 - body: - tasks: - - operation: install_package - target: agent - package: - centos: - amd64: https://nmap.org/dist/nmap-7.00-1.x86_64.rpm - arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.20-1PGDG.rhel7.aarch64.rpm, 
https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.20-1PGDG.rhel7.aarch64.rpm] - ubuntu: - amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb - windows: - amd64: https://get.videolan.org/vlc/3.0.7.1/win64/vlc-3.0.7.1-win64.exe - macos: - amd64: https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg - - operation: check_agent_vulnerability - target: agent - parameters: - alert_indexed: False - api: True - alert: False - state_indice: False - vulnerability_data: - centos: - amd64: - # Wrong package - - PACKAGE_NAME: "nmap" - PACKAGE_VERSION: "7.00" - CVE: CVE-2020-28924 - - PACKAGE_NAME: "nmap" - PACKAGE_VERSION: "7.00" - CVE: CVE-2018-1000161 - arm64v8: - - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.20" - CVE: CVE-2023-39417 - ubuntu: - amd64: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "9.1.1" - CVE: CVE-2023-1387 - arm64v8: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "9.1.1" - CVE: CVE-2023-1387 - windows: - amd64: - - PACKAGE_NAME: "vlc" - PACKAGE_VERSION: "3.0.7" - CVE: CVE-2019-13962 - STATUS: ABSENT - - PACKAGE_NAME: "vlc" - PACKAGE_VERSION: "3.0.7.1" - CVE: CVE-2019-14437 - # Wrong package - macos: - amd64: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.1.0" - CVE: CVE-2022-21824 -# ----------------------------------------------------------- \ No newline at end of file + +# ---------------------------------------------------------------------------------- + +# - case: "Updating a vulnerable package that remains vulnerable to the same CVE" +# id: "update_vuln_package_vuln_remain" +# description: "Updating a vulnerable package that remains vulnerable to the same CVE" +# preconditions: null +# body: +# tasks: +# - operation: install_package +# target: agent +# package: +# centos: +# amd64: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm +# arm64v8: 
[https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.18-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.18-1PGDG.rhel7.aarch64.rpm] +# ubuntu: +# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb +# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb +# windows: +# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.8-win64.exe +# macos: +# amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# alert_indexed: False +# api: True +# alert: False +# state_indice: False +# vulnerability_data: +# centos: +# amd64: +# - PACKAGE_NAME: "nmap" +# PACKAGE_VERSION: "6.47-1" +# CVE: CVE-2020-28924 +# arm64v8: +# - PACKAGE_NAME: "postgresql11" +# PACKAGE_VERSION: "11.17.2" +# CVE: CVE-2023-39417 +# ubuntu: +# amd64: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "8.5.6" +# CVE: CVE-2023-2183 +# arm64v8: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "8.5.6" +# CVE: CVE-2023-2183 +# windows: +# amd64: +# - PACKAGE_NAME: "vlc" +# PACKAGE_VERSION: "3.0.8" +# CVE: CVE-2023-47360 +# macos: +# amd64: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "17.1.0" +# CVE: CVE-2022-21824 +# teardown: +# tasks: +# - operation: remove_package +# target: agent +# package: +# windows: +# amd64: vlc + +# # --------------------------------------------------------------------- + +# - case: "Updating a vulnerable package that becomes vulnerable to another CVE" +# id: "updating_vulnerable_package_another_cve" +# description: "Updating a vulnerable package that becomes vulnerable to another CVE" +# preconditions: +# tasks: +# - operation: install_package +# target: agent +# package: +# windows: +# amd64: https://get.videolan.org/vlc/3.0.7/win32/vlc-3.0.7-win32.exe +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# 
alert_indexed: False +# api: True +# alert: False +# state_indice: False +# vulnerability_data: +# windows: +# amd64: +# - PACKAGE_NAME: "vlc" +# PACKAGE_VERSION: "3.0.8" +# CVE: CVE-2023-47360 +# body: +# tasks: +# - operation: install_package +# target: agent +# package: +# centos: +# amd64: https://nmap.org/dist/nmap-7.00-1.x86_64.rpm +# arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.20-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.20-1PGDG.rhel7.aarch64.rpm] +# ubuntu: +# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb +# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb +# windows: +# amd64: https://get.videolan.org/vlc/3.0.7.1/win64/vlc-3.0.7.1-win64.exe +# macos: +# amd64: https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# alert_indexed: False +# api: True +# alert: False +# state_indice: False +# vulnerability_data: +# centos: +# amd64: +# # Wrong package +# - PACKAGE_NAME: "nmap" +# PACKAGE_VERSION: "7.00" +# CVE: CVE-2020-28924 +# - PACKAGE_NAME: "nmap" +# PACKAGE_VERSION: "7.00" +# CVE: CVE-2018-1000161 +# arm64v8: +# - PACKAGE_NAME: "postgresql11" +# PACKAGE_VERSION: "11.20" +# CVE: CVE-2023-39417 +# ubuntu: +# amd64: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "9.1.1" +# CVE: CVE-2023-1387 +# arm64v8: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "9.1.1" +# CVE: CVE-2023-1387 +# windows: +# amd64: +# - PACKAGE_NAME: "vlc" +# PACKAGE_VERSION: "3.0.7" +# CVE: CVE-2019-13962 +# STATUS: ABSENT +# - PACKAGE_NAME: "vlc" +# PACKAGE_VERSION: "3.0.7.1" +# CVE: CVE-2019-14437 +# # Wrong package +# macos: +# amd64: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "17.1.0" +# CVE: CVE-2022-21824 + +# # ----------------------------------------------------------- \ No newline at end of file diff 
--git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 4d836d961d..14311a3c9d 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -119,7 +119,6 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: yield - # Truncate alerts and logs of managers and agents logger.error("Truncate managers and agents logs") truncate_remote_host_group_files(host_manager, 'all', 'logs') @@ -250,7 +249,6 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): cases = yaml.load(cases_file, Loader=yaml.FullLoader) - complete_list = [ ( case['preconditions'] if 'preconditions' in case else None, @@ -260,13 +258,14 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): for case in cases ] -dependencies = [None if 'depends' not in case else pytest.mark.depends(name=case['depend']) for case in cases] -list_ids = [ case['id'] for case in cases] +dependencies = [None if 'depends' not in case else pytest.mark.depends(name=case['id'], depends=case['depends']) for case in cases] +list_ids = [case['id'] for case in cases] +@pytest.mark.dependency() # @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) -def test_vulnerability_detector_scans(preconditions, body, teardown, setup, host_manager): +def test_vulnerability_detector_scans(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager): # Launch tests tasks launch_parallel_operations(body['tasks'], host_manager) From 50e2274312c24571f70e4ded2b97066b3ce2f343 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 28 Nov 2023 15:22:48 +0000 Subject: [PATCH 071/174] fix: change configuration to 
match refactor VD --- .../var-ossec-etc-ossec-server.conf.j2 | 25 +++++++++++++ .../configurations/manager.yaml | 35 ++----------------- 2 files changed, 28 insertions(+), 32 deletions(-) diff --git a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 index ff4d8cc3cb..9c8cbd223d 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 +++ b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 @@ -655,4 +655,29 @@ {{ wazuh_manager_config.cluster.hidden }} + + yes + + {% for host in groups['wazuh_indexer'] %} + https://{{ host }}:9200 + {% endfor %} + + {{ indexer_security_user }} + {{ indexer_security_password }} + + + + {{ filebeat_ssl_dir }}/root-ca.pem + + {{ filebeat_ssl_dir }}/{{ filebeat_node_name }}.pem + {{ filebeat_ssl_dir }}/{{ filebeat_node_name }}-key.pem + + + + + no + yes + 2h + + diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml index e3c38cdb8d..76ffcce032 100644 --- a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml @@ -1,41 +1,12 @@ - sections: - section: vulnerability-detector elements: - - interval: - value: 5m - enabled: value: 'yes' - - run_on_start: + - index-status: value: 'yes' - - provider: - attributes: - - name: 'redhat' - elements: - - enabled: - value: 'yes' - - os: - value: 7 - - provider: - attributes: - - name: 'canonical' - elements: - - enabled: - value: 'yes' - - os: - value: jammy - - provider: - attributes: - - name: 'msu' - elements: - - enabled: - value: 'yes' - - provider: - attributes: - - name: 'nvd' - elements: - - enabled: - value: 'yes' - + - feed-update-interval: + value: '2h' - section: sca 
elements: - enabled: From 05b92a9141ff068e8e9cf218314f471d7048bf2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 28 Nov 2023 16:56:57 +0000 Subject: [PATCH 072/174] fix: VD E2E configuration load function --- .../var-ossec-etc-ossec-server.conf.j2 | 24 --- .../cases/test_vulnerability.yaml | 148 +++++++++++++++++- .../configurations/manager.yaml | 49 ++++++ .../test_vulnerability_detector.py | 82 +++++++--- 4 files changed, 252 insertions(+), 51 deletions(-) diff --git a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 index 9c8cbd223d..3729b2a7b8 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 +++ b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 @@ -655,29 +655,5 @@ {{ wazuh_manager_config.cluster.hidden }} - - yes - - {% for host in groups['wazuh_indexer'] %} - https://{{ host }}:9200 - {% endfor %} - - {{ indexer_security_user }} - {{ indexer_security_password }} - - - - {{ filebeat_ssl_dir }}/root-ca.pem - - {{ filebeat_ssl_dir }}/{{ filebeat_node_name }}.pem - {{ filebeat_ssl_dir }}/{{ filebeat_node_name }}-key.pem - - - - - no - yes - 2h - diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 69b8bff171..8d2ff00b22 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -17,13 +17,12 @@ amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe macos: amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg + arm64v8: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - operation: check_agent_vulnerability target: agent parameters: alert_indexed: 
False - api: True - alert: False state_indice: False vulnerability_data: centos: @@ -54,12 +53,152 @@ - PACKAGE_NAME: "node" PACKAGE_VERSION: "17.0.1" CVE: CVE-2022-21824 + arm64v8: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.0.1" + CVE: CVE-2022-21824 + + +- case: "Upgrade of a vulnerable package: Remain vulnerable" + id: "upgrade_package_remain_vulnerable" + description: "Upgrade of a vulnerable package: Remain vulnerable" + preconditions: null + depends: "install_package" + body: + tasks: + - operation: install_package + target: agent + package: + centos: + amd64: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm + arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.18-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.18-1PGDG.rhel7.aarch64.rpm] + ubuntu: + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb + windows: + amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.7-win64.exe + macos: + amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg + arm64v8: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg + + - operation: check_agent_vulnerability + target: agent + parameters: + alert_indexed: False + state_indice: False + vulnerability_data: + centos: + amd64: + - PACKAGE_NAME: "nmap" + PACKAGE_VERSION: "6.47-1" + CVE: CVE-2018-15173 + arm64v8: + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.17.2" + CVE: CVE-2023-39417 + ubuntu: + amd64: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 + arm64v8: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 + windows: + amd64: + - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" + PACKAGE_VERSION: "3.0.7" + CVE: CVE-2023-47360 + macos: + amd64: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.1.0" + CVE: 
CVE-2022-21824 + arm64v8: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.1.0" + CVE: CVE-2022-21824 + +# Check that previous vuln does not longer appear and that new appear +- case: "Upgrade of a vulnerable package: New vulnerability" + id: "upgrade_package_new_vulnerability" + description: "Upgrade of a vulnerable package that become vulnerable to another CVE" + preconditions: null + depends: "upgrade_package_remain_vulnerable" + body: + tasks: + - operation: install_package + target: agent + package: + centos: + amd64: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm + arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.18-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.18-1PGDG.rhel7.aarch64.rpm] + ubuntu: + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb + windows: + amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.7-win64.exe + macos: + amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg + arm64v8: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg + + - operation: check_agent_vulnerability + target: agent + parameters: + alert_indexed: False + state_indice: False + vulnerability_data: + centos: + amd64: + - PACKAGE_NAME: "nmap" + PACKAGE_VERSION: "6.47-1" + CVE: CVE-2018-15173 + arm64v8: + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.17.2" + CVE: CVE-2023-39417 + ubuntu: + amd64: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 + arm64v8: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 + windows: + amd64: + - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" + PACKAGE_VERSION: "3.0.7" + CVE: CVE-2023-47360 + macos: + amd64: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.1.0" + CVE: CVE-2022-21824 + arm64v8: + - PACKAGE_NAME: "node" + 
PACKAGE_VERSION: "17.1.0" + CVE: CVE-2022-21824 + + + +# Check that previous vuln appear and a new one + + +# Check no new vuln appear + + +# Delete non-vulnerable package + +# Install vulnerable package again and remove it - case: "Removal of vulnerable package" id: "remove_vulnerable_package" description: "Installation of a vulnerable package" preconditions: null - depend: "install_package" + depends: "install_package" body: tasks: - operation: remove_package @@ -116,6 +255,9 @@ + + + # ---------------------------------------------------------------------------------- # - case: "Updating a vulnerable package that remains vulnerable to the same CVE" diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml index 76ffcce032..d3874b4737 100644 --- a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml @@ -7,6 +7,55 @@ value: 'yes' - feed-update-interval: value: '2h' + + - section: indexer + elements: + - enabled: + value: yes + - hosts: + elements: + - host: "http://INDEXER_SERVER:9200" + - username: + value: INDEXER_USERNAME + - password: + value: INDEXER_PASSWORD + - ssl: + elements: + - certificate_authorities: + elements: + ca: FILEBEAT_ROOT_CA + certificate: FILEBEAT_CERTIFICATE + key: FILEBEAT_KEY + + - section: sca + elements: + - enabled: + value: 'no' + + - section: rootcheck + elements: + - disabled: + value: 'yes' + + - section: syscheck + elements: + - disabled: + value: 'yes' + + - section: wodle + attributes: + - name: 'syscollector' + elements: + - disabled: + value: 'no' + + + + + + + + - section: sca elements: - enabled: diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 14311a3c9d..c0628a56d1 100644 --- 
a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -44,6 +44,7 @@ import pytest import logging import yaml +import time from typing import Generator, Dict, List @@ -72,16 +73,40 @@ } -def load_vulnerability_detector_configurations(): +def load_vulnerability_detector_configurations(host_manager): """Return the configurations for Vulnerability testing for the agent and manager roles Return: Dict: Configurations for each role """ - return { - 'agent': load_configuration_template(configurations_paths['agent'], [{}], [{}]), - 'manager': load_configuration_template(configurations_paths['manager'], [{}], [{}]) - } + configurations = {} + + for host in host_manager.get_group_hosts('all'): + if host in host_manager.get_group_hosts('agent'): + configurations[host] = load_configuration_template(configurations_paths['agent'], [{}], [{}]) + elif host in host_manager.get_group_hosts('manager'): + configuration_template = load_configuration_template(configurations_paths['manager'], [{}], [{}]) + + # Replace placeholders by real values + indexer_server = host_manager.get_group_hosts('indexer')[0] + indexer_server_variables = host_manager.get_host_variables(indexer_server) + manager_index = host_manager.get_host_groups('manager').index(host) + + configuration_variables = { + 'INDEXER_USERNAME': indexer_server_variables['indexer_user'], + 'INDEXER_PASSWORD': indexer_server_variables['indexer_password'], + 'INDEXER_SERVER': indexer_server_variables['ip'], + 'FILEBEAT_ROOT_CA': '/etc/pki/filebeat/root-ca.pem', + 'FILEBEAT_CERTIFICATE': f"/etc/pki/filebeat/node-{manager_index}.pem", + 'FILEBEAT_KEY': f"/etc/pki/filebeat/node-{manager_index}-key.pem" + } + + for key, value in configuration_variables.items(): + configuration_templace = configuration_templace.replace(key, value) + + configurations[host] = configuration_templace + + return configurations 
@pytest.fixture(scope='module') @@ -200,17 +225,24 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # Wait until all agents has been scanned logger.critical("Waiting until agent's VD scan is over") - wait_until_vuln_scan_agents_finished(host_manager) - # Check vulnerabilities for agent + # To Do: Replace with relevan event. For now timeout + time.sleep(300) + # wait_until_vuln_scan_agents_finished(host_manager) + logger.critical("Check agent's vulnerabilities") - agents_vuln_before_second_scan = get_agents_vulnerabilities(host_manager) - for agent, vuln in agents_vuln_before_second_scan.items(): - assert vuln, f"No vulnerabilities were detected for agent {agent}" + # Check that each agent has generated alerts + agents_vuln_first_scan = {} + indexer_alerts_first_scan = get_indexer_values(host_manager) + for agent in host_manager.get_group_hosts('agent'): + # Include vuln to the agent + import pdb; pdb.set_trace() + + # Check index state + # index_state = get_indexer_values(host_manager, '') + + # Assert consistency - # Compare agents_vuln_before_second_scan with state_index_content - logger.critical("Comparing state and API vulnerabilities for each agent") - check_vuln_state_consistency(host_manager, agents_vuln_before_second_scan) # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly logger.critical("Truncating manager logs") @@ -225,20 +257,20 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): monitoring_events_multihost(host_manager, monitoring_data) - # WARNING - # Is possible that second scan will not produce expected Finished Scan in the agent. - # In that case search for another event or include a hardcoded timeout - logger.critical("Waiting until agent's VD scan is over") - wait_until_vuln_scan_agents_finished(host_manager) + # To Do: Replace with relevan event. 
For now timeout + time.sleep(300) + + agents_vuln_second_scan = {} + indexer_alerts_second_scan = get_indexer_values(host_manager) + for agent in host_manager.get_group_hosts('agent'): + # Include vuln to the agent + import pdb; pdb.set_trace() - logger.critical("Getting agents's vulnerabilities") - agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) + # Check index state + # index_state = get_indexer_values(host_manager, '') - assert agents_vuln_before_second_scan == agents_vuln_after_second_scan + # Assert consistency - # Compare agents_vuln_after_second_scan with state_index_content - logger.critical("Comparing state and API vulnerabilities for each agent") - check_vuln_state_consistency(host_manager, agents_vuln_after_second_scan) # ------------------------- @@ -262,6 +294,8 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): list_ids = [case['id'] for case in cases] +print(dependencies) + @pytest.mark.dependency() # @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) From 33078bab59227760e0f0db77eeda771330065743 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 28 Nov 2023 19:32:05 +0000 Subject: [PATCH 073/174] fix: addapt VD to refactor --- .../wazuh_testing/end_to_end/configuration.py | 9 +-- .../wazuh_testing/end_to_end/indexer_api.py | 10 +-- .../end_to_end/remote_operations_handler.py | 77 ++++--------------- .../wazuh_testing/end_to_end/waiters.py | 2 +- .../var-ossec-etc-ossec-server.conf.j2 | 1 + .../cases/test_vulnerability.yaml | 6 +- .../configurations/manager.yaml | 26 ++++--- .../test_vulnerability_detector.py | 70 ++++++++--------- 8 files changed, 75 insertions(+), 126 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index 405a8e6163..d87f1ba6a2 100644 --- 
a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -60,25 +60,24 @@ def restore_configuration(host_manager: HostManager, configuration: dict) -> Non host_manager.modify_file_content(host, configuration_filepath, configuration[host]) -def configure_host(host: str, host_configuration_role: dict, host_manager: HostManager) -> None: +def configure_host(host: str, host_configuration: dict, host_manager: HostManager) -> None: """ Configure a specific host. Args: host: The name of the host to be configured. - host_configuration_role: Role of the configured host for the host. + host_configuration: Role of the configured host for the host. host_manager: An instance of the HostManager class containing information about hosts. """ host_os = host_manager.get_host_variables(host)['os_name'] config_file_path = configuration_filepath_os[host_os] - host_groups = host_manager.get_host_groups(host) - host_config = host_configuration_role.get('manager' if 'manager' in host_groups else 'agent', None) + host_config = host_configuration.get(host) if not host_config: raise TypeError(f"Host {host} configuration does not include a valid role (manager or agent):" - "{host_configuration_role}") + f"{host_configuration}") current_config = host_manager.get_file_content(str(host), config_file_path) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 6e79904340..a0b0b6ac6e 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -35,12 +35,8 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' Returns: str: The response text from the indexer API. """ - url = f"https://{host_manager.get_master_ip()}:9200/{index}_search?" 
- query = { - "query": { - "match_all": {} - } - } - response = requests.get(url=url, params={'pretty': 'true'}, json=query, verify=False, + url = f"https://{host_manager.get_master_ip()}:9200/{index}/_search" + response = requests.get(url=url, params={'pretty': 'true'}, verify=False, auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password'])) + return response.json() diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 2d12eb2343..e9aaf08f64 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -107,14 +107,8 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man elif operation == 'check_agent_vulnerability': print("Check agent vuln") if operation_data['parameters']['alert_indexed']: - print("Check alert indexed") check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data']) - if operation_data['parameters']['api']: - print("Check vuln in api response") - check_vuln_alert_api(host_manager, operation_data['vulnerability_data'], - operation_data['parameters'].get('state', True)) - if operation_data['parameters']['state_indice']: check_vuln_state_index(host_manager, operation_data['vulnerability_data']) @@ -130,7 +124,16 @@ def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[s ToDo: Implement the functionality. 
""" - index_vuln_state_content = get_indexer_values(host_manager) + # It follows https://www.elastic.co/guide/en/ecs/current/ecs-vulnerability.html + index_vuln_state_content = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities')['hits']['hits'] + for index_vuln_state_content in index_vuln_state_content: + pass + + + agents_vuln_first_scan = {} + indexer_alerts_first_scan = get_indexer_values(host_manager)['hits']['hits'] + + def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict]): @@ -148,63 +151,13 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict Implement the functionality. """ indexer_alerts = get_indexer_values(host_manager) - - pass - -def check_vuln_alert_api(host_manager: HostManager, vulnerability_data: Dict[str, Dict], state=True): - """ - Check vulnerability alerts via API for a host. - - Args: - host_manager (HostManager): An instance of the HostManager class containing information about hosts. - vulnerability_data (dict): Dictionary containing vulnerability data. - - ToDo: - Implement the functionality. - """ - - api_vulns = get_agents_vulnerabilities(host_manager) - not_found_vuln = [] - found_vuln = [] + for alert in indexer_alerts_first_scan: + agent = alert['agent']['name'] + if re.match('CVE. 
affects.*', alert['description']): + agents_vuln_first_scan[agent] = alert for agent in host_manager.get_group_hosts('agent'): - print("\n\n---------------------------------") - print(f"Agent {agent}") - - agent_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - agent_arch_name = host_manager.get_host_variables(agent)['architecture'] - vulnerability_data_agent = vulnerability_data[agent_os_name][agent_arch_name] - current_vulns_agent = api_vulns[agent] - print(f"Vuln of agent {agent}: {vulnerability_data_agent}") - for vulnerability in vulnerability_data_agent: - print(f"Searching for {agent} and {vulnerability['CVE']}") - expected_vuln = { - 'status': 'VALID', - 'cve': vulnerability['CVE'] - } - found = False - for current_vulnerability in current_vulns_agent: - if all(current_vulnerability[key] == value for key, value in expected_vuln.items()): - found = True - print(f"Found {current_vulnerability}") - found_vuln.append({ - 'agent': agent, - 'cve': vulnerability['CVE'] - }) - - if not found: - not_found_vuln.append({ - 'agent': agent, - 'cve': vulnerability['CVE'] - }) - print("\n\n---------------------------------") - - if state: - print(f"No found {not_found_vuln}") - assert len(not_found_vuln) == 0 - else: - print(f"Found unexpected vulnerabilities {found_vuln}") - assert len(found_vuln) == 0 + assert agent not in agents_vuln_first_scan, f"No vulnerabilities were detected for Agent {agent}" def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager): diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index ee29749743..e62b7bb81d 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -35,7 +35,7 @@ def wait_until_vd_is_updated(host_manager: HostManager) -> None: for manager in host_manager.get_group_hosts('manager'): monitoring_data = 
generate_monitoring_logs_manager( - host_manager, manager, 'Starting vulnerability scan', 800 + host_manager, manager, 'Message processed', 1000 ) monitoring_events_multihost(host_manager, monitoring_data) diff --git a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 index 3729b2a7b8..fb2ed9abc2 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 +++ b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 @@ -656,4 +656,5 @@ + diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 8d2ff00b22..52f4fc2814 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -192,6 +192,7 @@ # Delete non-vulnerable package + # Install vulnerable package again and remove it - case: "Removal of vulnerable package" @@ -253,11 +254,6 @@ CVE: CVE-2022-21824 - - - - - # ---------------------------------------------------------------------------------- # - case: "Updating a vulnerable package that remains vulnerable to the same CVE" diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml index d3874b4737..573458d967 100644 --- a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml @@ -1,5 +1,5 @@ - sections: - - section: vulnerability-detector + - section: vulnerability-detection elements: - enabled: value: 'yes' @@ -11,21 +11,25 @@ - section: indexer elements: - enabled: - value: yes + value: 'yes' - hosts: elements: - - host: 
"http://INDEXER_SERVER:9200" + - host: + value: "https://INDEXER_SERVER:9200" - username: - value: INDEXER_USERNAME + value: INDEXER_USERNAME - password: - value: INDEXER_PASSWORD + value: INDEXER_PASSWORD - ssl: - elements: - - certificate_authorities: - elements: - ca: FILEBEAT_ROOT_CA - certificate: FILEBEAT_CERTIFICATE - key: FILEBEAT_KEY + elements: + - certificate_authorities: + elements: + - ca: + value: FILEBEAT_ROOT_CA + - certificate: + value: FILEBEAT_CERTIFICATE + - key: + value: FILEBEAT_KEY - section: sca elements: diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index c0628a56d1..984db5c72f 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -45,7 +45,8 @@ import logging import yaml import time - +import re +import ast from typing import Generator, Dict, List from wazuh_testing.end_to_end.configuration import backup_configurations, restore_configuration, configure_environment @@ -90,7 +91,7 @@ def load_vulnerability_detector_configurations(host_manager): # Replace placeholders by real values indexer_server = host_manager.get_group_hosts('indexer')[0] indexer_server_variables = host_manager.get_host_variables(indexer_server) - manager_index = host_manager.get_host_groups('manager').index(host) + manager_index = host_manager.get_group_hosts('manager').index(host) + 2 configuration_variables = { 'INDEXER_USERNAME': indexer_server_variables['indexer_user'], @@ -100,13 +101,14 @@ def load_vulnerability_detector_configurations(host_manager): 'FILEBEAT_CERTIFICATE': f"/etc/pki/filebeat/node-{manager_index}.pem", 'FILEBEAT_KEY': f"/etc/pki/filebeat/node-{manager_index}-key.pem" } + configuration_template_str = str(configuration_template) for key, value in configuration_variables.items(): - configuration_templace = 
configuration_templace.replace(key, value) + configuration_template_str = configuration_template_str.replace(key, value) - configurations[host] = configuration_templace + configurations[host] = ast.literal_eval(configuration_template_str) - return configurations + return configurations @pytest.fixture(scope='module') @@ -122,7 +124,7 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: logger.error("Getting backup of current configurations") hosts_configuration_backup = backup_configurations(host_manager) logger.error("Configuring environment") - configure_environment(host_manager, load_vulnerability_detector_configurations()) + configure_environment(host_manager, load_vulnerability_detector_configurations(host_manager)) # Restart managers and stop agents logger.error("Stopping agents") @@ -210,6 +212,8 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): - syscollector - vulnerability_detector """ + + # Monitor for the first Syscollector scan in all the agents logger.critical("Monitoring Syscollector First Scan") monitoring_data = generate_monitoring_logs_all_agent(host_manager, @@ -231,18 +235,23 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # wait_until_vuln_scan_agents_finished(host_manager) logger.critical("Check agent's vulnerabilities") + # Check that each agent has generated alerts agents_vuln_first_scan = {} - indexer_alerts_first_scan = get_indexer_values(host_manager) - for agent in host_manager.get_group_hosts('agent'): - # Include vuln to the agent - import pdb; pdb.set_trace() + indexer_alerts_first_scan = get_indexer_values(host_manager)['hits']['hits'] - # Check index state - # index_state = get_indexer_values(host_manager, '') + for alert in indexer_alerts_first_scan: + agent = alert['agent']['name'] + if re.match('CVE. 
affects.*', alert['description']): + agents_vuln_first_scan[agent] = alert - # Assert consistency + for agent in host_manager.get_group_hosts('agent'): + assert agent not in agents_vuln_first_scan, f"No vulnerabilities were detected for Agent {agent}" + # Check index state + index_state_first_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities') + assert index_state_first_scan['hits']['total']['value'] != 0, "Index state value does not include any" \ + f"vulnerability {index_state_first_scan}" # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly logger.critical("Truncating manager logs") @@ -261,17 +270,22 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): time.sleep(300) agents_vuln_second_scan = {} - indexer_alerts_second_scan = get_indexer_values(host_manager) - for agent in host_manager.get_group_hosts('agent'): - # Include vuln to the agent - import pdb; pdb.set_trace() - - # Check index state - # index_state = get_indexer_values(host_manager, '') + indexer_alerts_second_scan = get_indexer_values(host_manager)['hits']['hits'] - # Assert consistency + for alert in indexer_alerts_second_scan: + agent = alert['agent']['name'] + if re.match('CVE. 
affects.*', alert['description']): + agents_vuln_second_scan[agent] = alert + assert agents_vuln_second_scan == agents_vuln_first_scan, "Differences between first and second syscollector" \ + f"First: Scan: {agents_vuln_first_scan}" \ + f"Second Scan: {agents_vuln_second_scan}" + # Check index state + index_state_second_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities') + assert index_state_second_scan == index_state_first_scan, f"Index state value changed between scans: " \ + f"First scan: {index_state_first_scan}" \ + f"Second scan: {index_state_second_scan}" # ------------------------- @@ -294,8 +308,6 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): list_ids = [case['id'] for case in cases] -print(dependencies) - @pytest.mark.dependency() # @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) @@ -303,15 +315,3 @@ def test_vulnerability_detector_scans(setup_vulnerability_tests, preconditions, # Launch tests tasks launch_parallel_operations(body['tasks'], host_manager) - - # # Check vulnerability - # agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager) - - # Check alert in Wazuh Indexer - # monitoring_data = generate_monitoring_alerts_all_agent(host_manager, body['check_alerts']) - # expected_alerts = body['check_agent_alert_indexer'] - - # Check agent System state - # To Do - # results = monitoring_events(host_manager, monitoring_data) - # assert all(results.values()), f"Expected message was not triggered for some agents, {results}" From 556ac82e8c17352b8171034aacc204bae3290072 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 29 Nov 2023 18:41:02 +0000 Subject: [PATCH 074/174] feat: include support for check indices and fix indexer api method --- .../wazuh_testing/end_to_end/indexer_api.py | 20 ++- .../end_to_end/remote_operations_handler.py | 
115 ++++++++++++++---- .../test_vulnerability_detector.py | 92 ++++++++------ 3 files changed, 165 insertions(+), 62 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index a0b0b6ac6e..62870887df 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -35,8 +35,24 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' Returns: str: The response text from the indexer API. """ + url = f"https://{host_manager.get_master_ip()}:9200/{index}/_search" - response = requests.get(url=url, params={'pretty': 'true'}, verify=False, - auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password'])) + headers = { + 'Content-Type': 'application/json', + } + + data = { + "query": { + "match_all": {} + } + } + param = { + 'pretty': 'true', + 'size': 10000, + } + + response = requests.get(url=url, params=param, verify=False, + auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password']), headers=headers, + json=data) return response.json() diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index e9aaf08f64..a638adecdb 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -20,9 +20,10 @@ This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ - +import re from typing import Dict, List from multiprocessing.pool import ThreadPool +from datetime import datetime, timezone from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.system import HostManager @@ -32,7 +33,8 @@ from wazuh_testing.end_to_end.regex import get_event_regex from 
wazuh_testing.end_to_end.logs import truncate_remote_host_group_files -def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): +def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_manager: HostManager, + current_datetime: str = None): """ Launch a remote operation on the specified host. @@ -52,7 +54,6 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man print("Performing remote operations") - if system == 'linux': system = host_manager.get_host_variables(host)['os'].split('_')[0] @@ -105,15 +106,14 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man wait_until_vuln_scan_agents_finished(host_manager) elif operation == 'check_agent_vulnerability': - print("Check agent vuln") if operation_data['parameters']['alert_indexed']: - check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data']) + check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data'], current_datetime) if operation_data['parameters']['state_indice']: - check_vuln_state_index(host_manager, operation_data['vulnerability_data']) + check_vuln_state_index(host_manager, operation_data['vulnerability_data'], current_datetime) -def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[str, Dict]): +def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None): """ Check vulnerability state index for a host. @@ -124,19 +124,60 @@ def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[s ToDo: Implement the functionality. 
""" - # It follows https://www.elastic.co/guide/en/ecs/current/ecs-vulnerability.html index_vuln_state_content = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities')['hits']['hits'] - for index_vuln_state_content in index_vuln_state_content: - pass + expected_alerts_not_found = [] + + for agent in host_manager.get_group_hosts('agent'): + host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(agent)['architecture'] + + if host_os_name in vulnerability_data and host_os_arch in vulnerability_data: + vulnerabilities = vulnerability_data[host_os_name][host_os_arch] + for vulnerability in vulnerabilities: + + for indice_vuln in index_vuln_state_content: + state_agent = indice_vuln['agent']['name'] + state_cve = indice_vuln['vulnerability']['enumeration'] + state_package_name = indice_vuln['package']['name'] + state_package_version = indice_vuln['agent']['version'] + found = False + + if state_agent == agent and state_cve == vulnerability['CVE'] \ + and state_package_name == vulnerability['PACKAGE_NAME'] and \ + state_package_version == vulnerability['PACKAGE_VERSION']: + found = True + + if not found: + expected_alerts_not_found.append(vulnerability) + + assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" - agents_vuln_first_scan = {} - indexer_alerts_first_scan = get_indexer_values(host_manager)['hits']['hits'] +def detect_alerts_by_agent(alerts, regex, current_datetime=None): + alerts_vuln_by_agent = {} + for alert in alerts: + valid_timestamp = True + if current_datetime: + dt = datetime.strptime(alert['_source']['timestamp'], "%Y-%m-%dT%H:%M:%S.%f%z") + # Convert datetime to Unix timestamp (integer) + timestamp = int(dt.timestamp()) + if timestamp < current_datetime: + valid_timestamp = False + if valid_timestamp: + if re.match(regex, alert['_source']['rule']['description']): + if 'agent' in alert['_source']: + agent = 
alert['_source']['agent']['name'] + if agent not in alerts_vuln_by_agent: + alerts_vuln_by_agent[agent] = [] + else: + alerts_vuln_by_agent[agent].append(alert) + return alerts_vuln_by_agent -def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict]): + +def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None): """ Check vulnerability alerts in the indexer for a host. @@ -146,18 +187,40 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict Returns: list: List of vulnerability alerts. - - ToDo: - Implement the functionality. """ - indexer_alerts = get_indexer_values(host_manager) - for alert in indexer_alerts_first_scan: - agent = alert['agent']['name'] - if re.match('CVE. affects.*', alert['description']): - agents_vuln_first_scan[agent] = alert + regex_cve_affects = f"CVE.* affects .*" + regex_solved_vuln = f"The .* that affected .* was solved due to a package removal" + + + indexer_alerts = get_indexer_values(host_manager)['hits']['hits'] + # Get CVE affects alerts for all agents + detected_vuln_alerts_by_agent = detect_alerts_by_agent(indexer_alerts, regex_cve_affects, current_datetime) + solved_alerts_by_agent = detect_alerts_by_agent(indexer_alerts, regex_solved_vuln, current_datetime) + triggered_alerts = detected_vuln_alerts_by_agent + expected_alerts_not_found = [] + + if 'state' in vulnerability_data and not vulnerability_data['state']: + triggered_alerts = solved_alerts_by_agent for agent in host_manager.get_group_hosts('agent'): - assert agent not in agents_vuln_first_scan, f"No vulnerabilities were detected for Agent {agent}" + host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(agent)['architecture'] + + if host_os_name in vulnerability_data and host_os_arch in vulnerability_data: + vulnerabilities = vulnerability_data[host_os_name][host_os_arch] + for 
vulnerability in vulnerabilities: + cve = vulnerability['CVE'] + package = vulnerabilities['PACKAGE'] + version = vulnerabilities['VERSION'] + found = False + for triggered_alert in triggered_alerts[agent]: + if triggered_alert['cve'] == cve and triggered_alert['package'] == package and \ + triggered_alert['version'] == version: + found = True + if not found: + expected_alerts_not_found.append(vulnerability) + + assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager): @@ -169,9 +232,15 @@ def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict task_list (list): List of dictionaries containing operation details. host_manager (HostManager): An instance of the HostManager class containing information about hosts. """ + # Get the current datetime in UTC + current_datetime = datetime.now(timezone.utc) + + # Convert datetime to Unix timestamp (integer) + timestamp = int(current_datetime.timestamp()) + if task_list: for task in task_list: - launch_remote_operation(agent, task, host_manager) + launch_remote_operation(agent, task, host_manager, timestamp) def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager): diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 984db5c72f..70ea2d551b 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -58,7 +58,7 @@ from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.tools.system import HostManager -from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations 
+from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations, detect_alerts_by_agent from wazuh_testing.modules.syscollector import TIMEOUT_SYSCOLLECTOR_SCAN @@ -73,6 +73,13 @@ 'agent': os.path.join(configurations_dir, 'agent.yaml') } +def list_of_dicts_to_set_of_tuples(lst): + """ + Convert a list of dictionaries to a set of frozensets (tuples). + Each frozenset represents the items of a dictionary. + """ + return set(frozenset(d.items()) for d in lst) + def load_vulnerability_detector_configurations(host_manager): """Return the configurations for Vulnerability testing for the agent and manager roles @@ -133,6 +140,7 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: host_manager.control_environment('restart', ['manager']) # Wait until VD is updated + # To do: Change VD Feeds updated logger.error("Wait until Vulnerability Detector has update all the feeds") wait_until_vd_is_updated(host_manager) @@ -158,25 +166,41 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: host_manager.control_environment('restart', ['manager']) -def check_vuln_state_consistency(host_manager: HostManager, agents_vulnerabilities_api_value: Dict[str, List[Dict]]): - """Check the consistency of the vulnerabilities included in the indexer. - - This function ensures that the vulnerabilities gathered from the Wazuh API are included in the - Agent's Vulnerabilities index. - - Args: - host_manager (HostManager): Host manager of the environment. - agents_vulnerabilities_api_value (Dict[str, List[str]]): A dictionary containing the list of vulnerabilities - for each agent, where keys are agent names and values are lists of strings representing the vulnerabilities. - - Raises: - AssertionError: If inconsistencies are found between the API values and the state of the indexer. 
- """ +def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states): # Get the indexer values - state_index_content_before_second_scan = get_indexer_values(host_manager) - - # To Do: Ensure consistency of the agent_vulnerabilities_api_value and state_index_content_before_second_scan - pass + alerts_vulnerabilities = [] + indices_vulnerabilities = [] + + for vulnerability in vulnerabilities_alerts.values(): + alert_agent = vulnerability['_source']['agent']['name'] + alert_cve = vulnerability['data']['vulnerability']['cve'] + alert_package_version = vulnerability['data']['vulnerability']['package']['version'] + alert_package_name = vulnerability['data']['vulnerability']['package']['name'] + alerts_vulnerabilities.append({ + 'cve': alert_cve, + 'agent': alert_agent, + 'package_name': alert_package_name, + 'package_version': alert_package_version + }) + + + for vulnerabilities_state in vulnerabilities_states: + state_agent = vulnerabilities_state['agent']['name'] + state_cve = vulnerabilities_state['vulnerability']['enumeration'] + state_package_name = vulnerabilities_state['package']['name'] + state_package_version = vulnerabilities_state['agent']['version'] + indices_vulnerabilities.append({ + 'cve': state_cve, + 'agent': state_agent, + 'package_name': state_package_name, + 'package_version': state_package_version + }) + + indices_vulnerabilities_set = list_of_dicts_to_set_of_tuples(indices_vulnerabilities) + alerts_vulnerabilities_set = list_of_dicts_to_set_of_tuples(alerts_vulnerabilities) + + # Assert that the sets are equal + assert indices_vulnerabilities_set == alerts_vulnerabilities_set, "Discrepancies beetween alerts and states indices" @pytest.mark.dependency() @@ -212,7 +236,7 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): - syscollector - vulnerability_detector """ - + TIMEOUT_AGENTS_VULNERABILITY_SCAN = 300 # Monitor for the first Syscollector scan in all the agents logger.critical("Monitoring 
Syscollector First Scan") @@ -230,8 +254,8 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # Wait until all agents has been scanned logger.critical("Waiting until agent's VD scan is over") - # To Do: Replace with relevan event. For now timeout - time.sleep(300) + # ToDo: Replace with relevan event. For now timeout + time.sleep(TIMEOUT_AGENTS_VULNERABILITY_SCAN) # wait_until_vuln_scan_agents_finished(host_manager) logger.critical("Check agent's vulnerabilities") @@ -240,18 +264,17 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): agents_vuln_first_scan = {} indexer_alerts_first_scan = get_indexer_values(host_manager)['hits']['hits'] - for alert in indexer_alerts_first_scan: - agent = alert['agent']['name'] - if re.match('CVE. affects.*', alert['description']): - agents_vuln_first_scan[agent] = alert + vuln_alerts_by_agent_first_scan = detect_alerts_by_agent(indexer_alerts_first_scan, 'CVE. affects.*') + # Check that it has been triggered vulnerability detector alerts for agent in host_manager.get_group_hosts('agent'): - assert agent not in agents_vuln_first_scan, f"No vulnerabilities were detected for Agent {agent}" + assert agent not in vuln_alerts_by_agent_first_scan, f"No vulnerabilities were detected for Agent {agent}" + assert len(vuln_alerts_by_agent_first_scan[agent]) != 0 - # Check index state + # Check index state is not empty index_state_first_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities') - assert index_state_first_scan['hits']['total']['value'] != 0, "Index state value does not include any" \ - f"vulnerability {index_state_first_scan}" + + check_vuln_state_consistency(index_state_first_scan, vuln_alerts_by_agent_first_scan) # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly logger.critical("Truncating manager logs") @@ -271,13 +294,9 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): 
agents_vuln_second_scan = {} indexer_alerts_second_scan = get_indexer_values(host_manager)['hits']['hits'] + vuln_alerts_by_agent_second_scan = detect_alerts_by_agent(indexer_alerts_second_scan, 'CVE. affects.*') - for alert in indexer_alerts_second_scan: - agent = alert['agent']['name'] - if re.match('CVE. affects.*', alert['description']): - agents_vuln_second_scan[agent] = alert - - assert agents_vuln_second_scan == agents_vuln_first_scan, "Differences between first and second syscollector" \ + assert vuln_alerts_by_agent_first_scan == vuln_alerts_by_agent_second_scan, "Differences between first and second syscollector" \ f"First: Scan: {agents_vuln_first_scan}" \ f"Second Scan: {agents_vuln_second_scan}" @@ -294,7 +313,6 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): with open(os.path.join(current_dir, os.path.join('cases', 'test_vulnerability.yaml')), 'r') as cases_file: cases = yaml.load(cases_file, Loader=yaml.FullLoader) - complete_list = [ ( case['preconditions'] if 'preconditions' in case else None, From 3b835aa477450f771d51186103c0e3f25db19757 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 5 Dec 2023 12:17:38 +0000 Subject: [PATCH 075/174] feat: include packages for VD E2E testing --- .../wazuh_testing/end_to_end/indexer_api.py | 11 +- .../end_to_end/remote_operations_handler.py | 119 +------ .../end_to_end/vulnerability_detector.py | 110 +++++++ .../cases/test_vulnerability.yaml | 303 +++++++++++++++--- .../test_vulnerability_detector.py | 3 +- 5 files changed, 377 insertions(+), 169 deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 62870887df..6af4a2da3a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py 
@@ -22,7 +22,7 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, - index: str = 'wazuh-alerts*') -> Dict: + index: str = 'wazuh-alerts*', greater_than_timestamp=None) -> Dict: """ Get values from the Wazuh Indexer API. @@ -46,6 +46,15 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' "match_all": {} } } + if greater_than_timestamp: + data['query'].update( + { + 'range': { + "@timestamp": { + "gte": greater_than_timestamp + } + } + }) param = { 'pretty': 'true', 'size': 10000, diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index a638adecdb..45980eea99 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -32,6 +32,8 @@ from wazuh_testing.end_to_end.waiters import wait_until_vuln_scan_agents_finished from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files +from wazuh_testing.end_to_end.vulnerability_detector import check_vuln_alert_indexer, check_vuln_state_index + def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_manager: HostManager, current_datetime: str = None): @@ -58,7 +60,6 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man system = host_manager.get_host_variables(host)['os'].split('_')[0] if operation == 'install_package': - print("Installing package") package_data = operation_data['package'] package_url = package_data[host_os_name][host_os_arch] @@ -81,6 +82,7 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man monitoring_events_multihost(host_manager, monitoring_data) truncate_remote_host_group_files(host_manager, 'manager', 'logs') + # Wait until VD scan 
wait_until_vuln_scan_agents_finished(host_manager) @@ -113,116 +115,6 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man check_vuln_state_index(host_manager, operation_data['vulnerability_data'], current_datetime) -def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None): - """ - Check vulnerability state index for a host. - - Args: - host_manager (HostManager): An instance of the HostManager class containing information about hosts. - vulnerability_data (dict): Dictionary containing vulnerability data. - - ToDo: - Implement the functionality. - """ - index_vuln_state_content = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities')['hits']['hits'] - expected_alerts_not_found = [] - - for agent in host_manager.get_group_hosts('agent'): - host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - host_os_arch = host_manager.get_host_variables(agent)['architecture'] - - if host_os_name in vulnerability_data and host_os_arch in vulnerability_data: - vulnerabilities = vulnerability_data[host_os_name][host_os_arch] - for vulnerability in vulnerabilities: - - for indice_vuln in index_vuln_state_content: - state_agent = indice_vuln['agent']['name'] - state_cve = indice_vuln['vulnerability']['enumeration'] - state_package_name = indice_vuln['package']['name'] - state_package_version = indice_vuln['agent']['version'] - found = False - - if state_agent == agent and state_cve == vulnerability['CVE'] \ - and state_package_name == vulnerability['PACKAGE_NAME'] and \ - state_package_version == vulnerability['PACKAGE_VERSION']: - found = True - - if not found: - expected_alerts_not_found.append(vulnerability) - - assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" - - -def detect_alerts_by_agent(alerts, regex, current_datetime=None): - alerts_vuln_by_agent = {} - for alert in alerts: - 
valid_timestamp = True - if current_datetime: - dt = datetime.strptime(alert['_source']['timestamp'], "%Y-%m-%dT%H:%M:%S.%f%z") - - # Convert datetime to Unix timestamp (integer) - timestamp = int(dt.timestamp()) - if timestamp < current_datetime: - valid_timestamp = False - - if valid_timestamp: - if re.match(regex, alert['_source']['rule']['description']): - if 'agent' in alert['_source']: - agent = alert['_source']['agent']['name'] - if agent not in alerts_vuln_by_agent: - alerts_vuln_by_agent[agent] = [] - else: - alerts_vuln_by_agent[agent].append(alert) - - return alerts_vuln_by_agent - - -def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None): - """ - Check vulnerability alerts in the indexer for a host. - - Args: - host_manager (HostManager): An instance of the HostManager class containing information about hosts. - vulnerability_data (dict): Dictionary containing vulnerability data. - - Returns: - list: List of vulnerability alerts. 
- """ - regex_cve_affects = f"CVE.* affects .*" - regex_solved_vuln = f"The .* that affected .* was solved due to a package removal" - - - indexer_alerts = get_indexer_values(host_manager)['hits']['hits'] - # Get CVE affects alerts for all agents - detected_vuln_alerts_by_agent = detect_alerts_by_agent(indexer_alerts, regex_cve_affects, current_datetime) - solved_alerts_by_agent = detect_alerts_by_agent(indexer_alerts, regex_solved_vuln, current_datetime) - triggered_alerts = detected_vuln_alerts_by_agent - expected_alerts_not_found = [] - - if 'state' in vulnerability_data and not vulnerability_data['state']: - triggered_alerts = solved_alerts_by_agent - - for agent in host_manager.get_group_hosts('agent'): - host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - host_os_arch = host_manager.get_host_variables(agent)['architecture'] - - if host_os_name in vulnerability_data and host_os_arch in vulnerability_data: - vulnerabilities = vulnerability_data[host_os_name][host_os_arch] - for vulnerability in vulnerabilities: - cve = vulnerability['CVE'] - package = vulnerabilities['PACKAGE'] - version = vulnerabilities['VERSION'] - found = False - for triggered_alert in triggered_alerts[agent]: - if triggered_alert['cve'] == cve and triggered_alert['package'] == package and \ - triggered_alert['version'] == version: - found = True - if not found: - expected_alerts_not_found.append(vulnerability) - - assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" - - def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager): """ Launch sequential remote operations on an agent. @@ -232,11 +124,8 @@ def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict task_list (list): List of dictionaries containing operation details. host_manager (HostManager): An instance of the HostManager class containing information about hosts. 
""" - # Get the current datetime in UTC - current_datetime = datetime.now(timezone.utc) - # Convert datetime to Unix timestamp (integer) - timestamp = int(current_datetime.timestamp()) + timestamp = datetime.utcnow().isoformat() if task_list: for task in task_list: diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py new file mode 100644 index 0000000000..074486b490 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py @@ -0,0 +1,110 @@ + +def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None): + """ + Check vulnerability state index for a host. + + Args: + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + vulnerability_data (dict): Dictionary containing vulnerability data. + + ToDo: + Implement the functionality. + """ + index_vuln_state_content = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', + greater_than_timestamp=current_datetime)['hits']['hits'] + expected_alerts_not_found = [] + + for agent in host_manager.get_group_hosts('agent'): + host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(agent)['architecture'] + + if host_os_name in vulnerability_data and host_os_arch in vulnerability_data: + vulnerabilities = vulnerability_data[host_os_name][host_os_arch] + for vulnerability in vulnerabilities: + + for indice_vuln in index_vuln_state_content: + state_agent = indice_vuln['agent']['name'] + state_cve = indice_vuln['vulnerability']['enumeration'] + state_package_name = indice_vuln['package']['name'] + state_package_version = indice_vuln['agent']['version'] + found = False + + if state_agent == agent and state_cve == vulnerability['CVE'] \ + and state_package_name == vulnerability['PACKAGE_NAME'] and \ + 
state_package_version == vulnerability['PACKAGE_VERSION']: + found = True + + if not found: + expected_alerts_not_found.append(vulnerability) + + assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" + + +def detect_alerts_by_agent(alerts, regex, current_datetime=None): + alerts_vuln_by_agent = {} + for alert in alerts: + valid_timestamp = True + if current_datetime: + dt = datetime.strptime(alert['_source']['timestamp'], "%Y-%m-%dT%H:%M:%S.%f%z") + + # Convert datetime to Unix timestamp (integer) + timestamp = int(dt.timestamp()) + if timestamp < current_datetime: + valid_timestamp = False + + if valid_timestamp: + if re.match(regex, alert['_source']['rule']['description']): + if 'agent' in alert['_source']: + agent = alert['_source']['agent']['name'] + if agent not in alerts_vuln_by_agent: + alerts_vuln_by_agent[agent] = [] + else: + alerts_vuln_by_agent[agent].append(alert) + + return alerts_vuln_by_agent + + +def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None): + """ + Check vulnerability alerts in the indexer for a host. + + Args: + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + vulnerability_data (dict): Dictionary containing vulnerability data. + + Returns: + list: List of vulnerability alerts. 
+ """ + regex_cve_affects = f"CVE.* affects .*" + regex_solved_vuln = f"The .* that affected .* was solved due to a package removal" + + + indexer_alerts = get_indexer_values(host_manager, greater_than_timestamp=current_datetime)['hits']['hits'] + # Get CVE affects alerts for all agents + detected_vuln_alerts_by_agent = detect_alerts_by_agent(indexer_alerts, regex_cve_affects, current_datetime) + solved_alerts_by_agent = detect_alerts_by_agent(indexer_alerts, regex_solved_vuln, current_datetime) + triggered_alerts = detected_vuln_alerts_by_agent + expected_alerts_not_found = [] + + if 'state' in vulnerability_data and not vulnerability_data['state']: + triggered_alerts = solved_alerts_by_agent + + for agent in host_manager.get_group_hosts('agent'): + host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(agent)['architecture'] + + if host_os_name in vulnerability_data and host_os_arch in vulnerability_data: + vulnerabilities = vulnerability_data[host_os_name][host_os_arch] + for vulnerability in vulnerabilities: + cve = vulnerability['CVE'] + package = vulnerabilities['PACKAGE'] + version = vulnerabilities['VERSION'] + found = False + for triggered_alert in triggered_alerts[agent]: + if triggered_alert['cve'] == cve and triggered_alert['package'] == package and \ + triggered_alert['version'] == version: + found = True + if not found: + expected_alerts_not_found.append(vulnerability) + + assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" \ No newline at end of file diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 52f4fc2814..47190c146c 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -1,3 +1,7 @@ +# 
Upload packages +# https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.17-1PGDG.rhel7.x86_64.rpm +# https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.17-1PGDG.rhel7.x86_64.rpm + - case: "Installation of a vulnerable package" id: "install_package" description: "Installation of a vulnerable package" @@ -8,8 +12,8 @@ target: agent package: centos: - amd64: https://nmap.org/dist/nmap-6.46-1.x86_64.rpm - arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.17-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.17-2PGDG.rhel7.aarch64.rpm] + amd64: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.15-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.5-1PGDG.rhel7.x86_64.rpm] + arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.15-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.15-2PGDG.rhel7.aarch64.rpm] ubuntu: amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb @@ -27,13 +31,13 @@ vulnerability_data: centos: amd64: - - PACKAGE_NAME: "nmap" - PACKAGE_VERSION: "6.46-1" - CVE: CVE-2018-15173 + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.15-1PGDG.rhel7" + CVE: CVE-2022-2625 arm64v8: - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.17.2" - CVE: CVE-2023-39417 + PACKAGE_VERSION: "11.15-1PGDG.rhel7" + CVE: CVE-2022-2625 ubuntu: amd64: - PACKAGE_NAME: "grafana" @@ -47,7 +51,7 @@ amd64: - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" PACKAGE_VERSION: "3.0.6" - CVE: CVE-2023-47360 + CVE: CVE-2019-12874 macos: amd64: - PACKAGE_NAME: "node" 
@@ -70,8 +74,8 @@ target: agent package: centos: - amd64: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm - arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.18-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.18-1PGDG.rhel7.aarch64.rpm] + amd64: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.16-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.16-1PGDG.rhel7.x86_64.rpm] + arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.16-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.16-1PGDG.rhel7.aarch64.rpm] ubuntu: amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb @@ -89,27 +93,27 @@ vulnerability_data: centos: amd64: - - PACKAGE_NAME: "nmap" - PACKAGE_VERSION: "6.47-1" - CVE: CVE-2018-15173 + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.16-1PGDG.rhel7" + CVE: CVE-2022-2625 arm64v8: - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.17.2" - CVE: CVE-2023-39417 + PACKAGE_VERSION: "11.15-1PGDG.rhel7" + CVE: CVE-2022-2625 ubuntu: amd64: - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" + PACKAGE_VERSION: "8.5.6" CVE: CVE-2023-2183 arm64v8: - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" + PACKAGE_VERSION: "8.5.6" CVE: CVE-2023-2183 windows: amd64: - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" PACKAGE_VERSION: "3.0.7" - CVE: CVE-2023-47360 + CVE: CVE-2019-12874 macos: amd64: - PACKAGE_NAME: "node" @@ -120,7 +124,6 @@ PACKAGE_VERSION: "17.1.0" CVE: CVE-2022-21824 -# Check that previous vuln does not longer appear and that new appear - case: "Upgrade of a vulnerable package: New vulnerability" 
id: "upgrade_package_new_vulnerability" description: "Upgrade of a vulnerable package that become vulnerable to another CVE" @@ -132,62 +135,202 @@ target: agent package: centos: - amd64: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm - arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.18-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.18-1PGDG.rhel7.aarch64.rpm] + amd64: [https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-x86_64/postgresql12-libs-12.13-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-x86_64/postgresql12-12.13-1PGDG.rhel7.x86_64.rpm] + arm64v8: [https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-aarch64/postgresql12-libs-12.13-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-aarch64/postgresql12-12.13-1PGDG.rhel7.aarch64.rpm] ubuntu: - amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb windows: - amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.7-win64.exe + amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.8-win64.exe macos: - amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg - arm64v8: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg + amd64: https://nodejs.org/dist/v17.1.0/node-v18.0.0.pkg + arm64v8: https://nodejs.org/dist/v17.1.0/node-v18.0.0.pkg - operation: check_agent_vulnerability target: agent parameters: alert_indexed: False state_indice: False + state: False vulnerability_data: centos: amd64: - - PACKAGE_NAME: "nmap" - PACKAGE_VERSION: "6.47-1" - CVE: CVE-2018-15173 + - PACKAGE_NAME: "postgresql12" + CVE: 
CVE-2022-41862 arm64v8: - - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.17.2" - CVE: CVE-2023-39417 + - PACKAGE_NAME: "postgresql12" + CVE: CVE-2022-41862 ubuntu: amd64: - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" CVE: CVE-2023-2183 arm64v8: - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" CVE: CVE-2023-2183 windows: amd64: - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" - PACKAGE_VERSION: "3.0.7" - CVE: CVE-2023-47360 + CVE: CVE-2019-12874 macos: amd64: - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.1.0" CVE: CVE-2022-21824 arm64v8: - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.1.0" CVE: CVE-2022-21824 + - operation: check_agent_vulnerability + target: agent + parameters: + alert_indexed: False + state_indice: False + vulnerability_data: + centos: + amd64: + - PACKAGE_NAME: "postgresql12" + PACKAGE_VERSION: "12.13-1PGDG.rhel7" + CVE: CVE-2022-2625 + arm64v8: + - PACKAGE_NAME: "postgresql12" + PACKAGE_VERSION: "12.13-1PGDG.rhel7" + CVE: CVE-2022-2625 + ubuntu: + amd64: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "9.1.1" + CVE: CVE-2023-1387 + arm64v8: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "9.1.1s" + CVE: CVE-2023-1387 + windows: + amd64: + - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" + PACKAGE_VERSION: "3.0.8" + CVE: CVE-2019-18278 + macos: + amd64: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "18.0.0" + CVE: CVE-2023-38552 + + arm64v8: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "18.0.0" + CVE: CVE-2023-38552 + +# -------------- +# +# +- case: "Upgrade of a vulnerable package: Another vuln and maintain original vulnerability" + id: "upgrade_package_new_vulnerability_and_maintain" + description: "Upgrade of a vulnerable package that become vulnerable to another CVE and maintain the previous one" + preconditions: + tasks: + - operation: install_package + target: agent + package: + windows: + amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.10-win64.exe + - operation: 
check_agent_vulnerability + target: agent + parameters: + alert_indexed: FalseSee + state_indice: False + vulnerability_data: + amd64: + - PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.10" + CVE: CVE-2023-47360 + + depends: "upgrade_package_remain_vulnerable" + body: + tasks: + - operation: install_package + target: agent + package: + centos: + amd64: [https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64/postgresql13-libs-13.00-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64/postgresql13-13.00-1PGDG.rhel7.x86_64.rpm] + arm64v8: [https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-aarch64/postgresql13-libs-13.00-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-aarch64/postgresql13-13.0-1PGDG.rhel7.aarch64.rpm] + ubuntu: + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_amd64.deb + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb + windows: + amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.11-win64.exe + macos: + amd64: https://nodejs.org/dist/v17.1.0/node-v18.3.0.pkg + arm64v8: https://nodejs.org/dist/v17.1.0/node-v18.3.0.pkg + + - operation: check_agent_vulnerability + target: agent + parameters: + alert_indexed: False + state_indice: False + vulnerability_data: + centos: + amd64: + - PACKAGE_NAME: "postgresql13" + PACKAGE_VERSION: "13.00-1PGDG.rhel7" + CVE: CVE-2022-2625 + - PACKAGE_NAME: "postgresql13" + PACKAGE_VERSION: "13.00-1PGDG.rhel7" + CVE: CVE-2021-23222 + arm64v8: + - PACKAGE_NAME: "postgresql13" + PACKAGE_VERSION: "13.00-1PGDG.rhel7" + CVE: CVE-2022-2625 + - PACKAGE_NAME: "postgresql13" + PACKAGE_VERSION: "13.00-1PGDG.rhel7" + CVE: CVE-2021-23222 + ubuntu: + amd64: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "9.2.0" + CVE: CVE-2023-1387 + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "9.2.0" + CVE: CVE-2023-22462 + arm64v8: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: 
"9.2.0" + CVE: CVE-2023-1387 + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "9.2.0" + CVE: CVE-2023-22462 + windows: + amd64: + - PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.11" + CVE: CVE-2023-47360 + - PACKAGE_NAME: "vlc" + PACKAGE_VERSION: "3.0.11" + CVE: CVE-2021-25801 + macos: + amd64: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "18.3.0" + CVE: CVE-2023-38552 + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "18.3.0" + CVE: CVE-2023-32559 + arm64v8: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "18.3.0" + CVE: CVE-2023-38552 + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "18.3.0" + CVE: CVE-2023-32559 + + + +# Updating a vulnerable package that ceases to be vulnerable + -# Check that previous vuln appear and a new one -# Check no new vuln appear + +# Deleting a vulnerable package # Delete non-vulnerable package @@ -198,8 +341,61 @@ - case: "Removal of vulnerable package" id: "remove_vulnerable_package" description: "Installation of a vulnerable package" - preconditions: null - depends: "install_package" + preconditions: + tasks: + - operation: install_package + target: agent + package: + centos: + amd64: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.15-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.5-1PGDG.rhel7.x86_64.rpm] + arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.15-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.15-2PGDG.rhel7.aarch64.rpm] + ubuntu: + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb + windows: + amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe + macos: + amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg + arm64v8: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg + + - 
operation: check_agent_vulnerability + target: agent + parameters: + alert_indexed: False + state_indice: False + vulnerability_data: + centos: + amd64: + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.15-1PGDG.rhel7" + CVE: CVE-2022-2625 + arm64v8: + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.15-1PGDG.rhel7" + CVE: CVE-2022-2625 + ubuntu: + amd64: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 + arm64v8: + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 + windows: + amd64: + - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" + PACKAGE_VERSION: "3.0.6" + CVE: CVE-2019-12874 + macos: + amd64: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.0.1" + CVE: CVE-2022-21824 + arm64v8: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.0.1" + CVE: CVE-2022-21824 body: tasks: - operation: remove_package @@ -215,24 +411,23 @@ amd64: vlc macos: amd64: node* + - operation: check_agent_vulnerability target: agent parameters: alert_indexed: False - api: True - alert: False state_indice: False state: False vulnerability_data: centos: amd64: - - PACKAGE_NAME: "nmap" - PACKAGE_VERSION: "6.46-1" - CVE: CVE-2018-15173 + - PACKAGE_NAME: "postgresql11" + PACKAGE_VERSION: "11.15-1PGDG.rhel7" + CVE: CVE-2022-2625 arm64v8: - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.17.2" - CVE: CVE-2023-39417 + PACKAGE_VERSION: "11.15-1PGDG.rhel7" + CVE: CVE-2022-2625 ubuntu: amd64: - PACKAGE_NAME: "grafana" @@ -244,14 +439,18 @@ CVE: CVE-2023-2183 windows: amd64: - - PACKAGE_NAME: "vlc" + - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" PACKAGE_VERSION: "3.0.6" - CVE: CVE-2023-47360 + CVE: CVE-2019-12874 macos: amd64: - PACKAGE_NAME: "node" PACKAGE_VERSION: "17.0.1" CVE: CVE-2022-21824 + arm64v8: + - PACKAGE_NAME: "node" + PACKAGE_VERSION: "17.0.1" + CVE: CVE-2022-21824 # ---------------------------------------------------------------------------------- @@ -405,4 +604,4 @@ # PACKAGE_VERSION: 
"17.1.0" # CVE: CVE-2022-21824 -# # ----------------------------------------------------------- \ No newline at end of file +# # ----------------------------------------------------------- diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 70ea2d551b..d8b62d8ac6 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -296,7 +296,8 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): indexer_alerts_second_scan = get_indexer_values(host_manager)['hits']['hits'] vuln_alerts_by_agent_second_scan = detect_alerts_by_agent(indexer_alerts_second_scan, 'CVE. affects.*') - assert vuln_alerts_by_agent_first_scan == vuln_alerts_by_agent_second_scan, "Differences between first and second syscollector" \ + assert vuln_alerts_by_agent_first_scan == vuln_alerts_by_agent_second_scan, \ + "Differences between first and second syscollector" \ f"First: Scan: {agents_vuln_first_scan}" \ f"Second Scan: {agents_vuln_second_scan}" From 54937f001b3ffdd9925e24bc18e7c235f67bf450 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 28 Dec 2023 14:31:45 +0000 Subject: [PATCH 076/174] fix: imports errors --- .../wazuh_testing/end_to_end/vulnerability_detector.py | 7 +++++++ .../test_vulnerability_detector.py | 3 ++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py index 074486b490..0ce35c57bc 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py @@ -1,3 +1,10 @@ +from wazuh_testing.tools.system import HostManager +from 
wazuh_testing.end_to_end.indexer_api import get_indexer_values + +from typing import Dict +from datetime import datetime +import re + def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None): """ diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index d8b62d8ac6..e2a8458085 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -58,7 +58,8 @@ from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.tools.system import HostManager -from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations, detect_alerts_by_agent +from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations +from wazuh_testing.end_to_end.vulnerability_detector import detect_alerts_by_agent from wazuh_testing.modules.syscollector import TIMEOUT_SYSCOLLECTOR_SCAN From ed0f26d8410d165f87a87ea7531925edf35b9c56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 4 Jan 2024 16:26:29 +0000 Subject: [PATCH 077/174] fix: centos vuln package --- .../cases/test_vulnerability.yaml | 24 ++++++++----------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 47190c146c..49b6667af3 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -1,7 +1,3 @@ -# Upload packages -# 
https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.17-1PGDG.rhel7.x86_64.rpm -# https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.17-1PGDG.rhel7.x86_64.rpm - - case: "Installation of a vulnerable package" id: "install_package" description: "Installation of a vulnerable package" @@ -12,8 +8,8 @@ target: agent package: centos: - amd64: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.15-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.5-1PGDG.rhel7.x86_64.rpm] - arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.15-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.15-2PGDG.rhel7.aarch64.rpm] + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm ubuntu: amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb @@ -31,13 +27,13 @@ vulnerability_data: centos: amd64: - - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.15-1PGDG.rhel7" - CVE: CVE-2022-2625 + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 arm64v8: - - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.15-1PGDG.rhel7" - CVE: CVE-2022-2625 + - PACKAGE_NAME: "grafana" + PACKAGE_VERSION: "8.5.5" + CVE: CVE-2023-2183 ubuntu: amd64: - PACKAGE_NAME: "grafana" @@ -74,8 +70,8 @@ target: agent package: centos: - amd64: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.16-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.16-1PGDG.rhel7.x86_64.rpm] - arm64v8: 
[https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.16-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.16-1PGDG.rhel7.aarch64.rpm] + amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb + arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb ubuntu: amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb From 61831e0e7998f21477fe62a49b6a5a3aa3bacb54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 8 Jan 2024 10:40:01 +0000 Subject: [PATCH 078/174] feat: support new syscollector format --- .../wazuh_testing/data/syscollector.py | 171 +++++++++++++++++- .../wazuh_testing/tools/agent_simulator.py | 124 ++++++++++--- 2 files changed, 261 insertions(+), 34 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/data/syscollector.py b/deps/wazuh_testing/wazuh_testing/data/syscollector.py index 50cbde7e36..086056bf8e 100644 --- a/deps/wazuh_testing/wazuh_testing/data/syscollector.py +++ b/deps/wazuh_testing/wazuh_testing/data/syscollector.py @@ -1,25 +1,25 @@ -SYSCOLLECTOR_HEADER = '{"type":"",' \ +LEGACY_SYSCOLLECTOR_HEADER = '{"type":"",' \ '"ID":,"timestamp":""' -SYSCOLLECTOR_OS_EVENT_TEMPLATE = ',"inventory":{"os_name":"",' \ +LEGACY_SYSCOLLECTOR_OS_EVENT_TEMPLATE = ',"inventory":{"os_name":"",' \ '"os_major":"8","os_minor":"3","os_version":"8.3",' \ '"os_platform":"centos","sysname":"Linux",' \ '"hostname":"centos3","release":"4.18.0-240.1.1.el8_3.x86_64",' \ '"version":"#1 SMP Thu Nov 19 17:20:08 UTC 2020","architecture":"x86_64"}}' -SYSCOLLECTOR_HARDWARE_EVENT_TEMPLATE = ',"inventory":{"board_serial":"0",' \ +LEGACY_SYSCOLLECTOR_HARDWARE_EVENT_TEMPLATE = ',"inventory":{"board_serial":"0",' \ '"cpu_name":"AMD Ryzen 7 3750H with Radeon Vega Mobile 
Gfx",' \ '"cpu_cores":,"cpu_MHz":2295.686,"ram_total":828084,' \ '"ram_free":60488,"ram_usage":93}}' -SYSCOLLECTOR_PACKAGES_EVENT_TEMPLATE = ',"program":{"format":"rpm","name":"",' \ +LEGACY_SYSCOLLECTOR_PACKAGES_EVENT_TEMPLATE = ',"program":{"format":"rpm","name":"",' \ '"description":"JSON::XS compatible pure-Perl module",' \ '"size":126,"vendor":"CentOS","group":"Unspecified",' \ '"architecture":"noarch","source":"perl-JSON-PP-2.97.001-3.el8.src.rpm",' \ '"install_time":"2021/03/12 12:23:17"' \ ',"version":"1:2.97.001-3.el8"}}' -SYSCOLLECTOR_PROCESS_EVENT_TEMPLATE = ',"process":{"pid":3150,"name":"","state":"R",' \ +LEGACY_SYSCOLLECTOR_PROCESS_EVENT_TEMPLATE = ',"process":{"pid":3150,"name":"","state":"R",' \ '"ppid":2965,"utime":58,' \ '"stime":2,"cmd":"rpm","argvs":["-qa","xorg-x11*"],' \ '"euser":"root","ruser":"root","suser":"root","egroup":"ossec",' \ @@ -31,7 +31,7 @@ '"session":3150,"nlwp":1,' \ '"tgid":3150,"tty":0,"processor":0}}' -SYSCOLLECTOR_NETWORK_EVENT_TEMPLATE = ',"iface":{"name":"","type":"ethernet","state":"up",' \ +LEGACY_SYSCOLLECTOR_NETWORK_EVENT_TEMPLATE = ',"iface":{"name":"","type":"ethernet","state":"up",' \ '"MAC":"08:00:27:be:ce:3a","tx_packets":2135,' \ '"rx_packets":9091,"tx_bytes":210748,' \ '"rx_bytes":10134272,"tx_errors":0,' \ @@ -40,9 +40,164 @@ '"netmask":["255.255.255.0"],"broadcast":["10.0.2.255"],' \ '"metric":100,"gateway":"10.0.2.2","DHCP":"enabled"}}}' -SYSCOLLECTOR_PORT_EVENT_TEMPLATE = ',"port":{"protocol":"tcp","local_ip":"0.0.0.0",' \ +LEGACY_SYSCOLLECTOR_PORT_EVENT_TEMPLATE = ',"port":{"protocol":"tcp","local_ip":"0.0.0.0",' \ '"local_port":,"remote_ip":"0.0.0.0",' \ '"remote_port":0,"tx_queue":0,' \ '"rx_queue":0,"inode":22273,"state":"listening"}}' +1 +LEGACY_SYSCOLLECTOR_HOTFIX_EVENT_TEMPLATE = ',"hotfix":""}' -SYSCOLLECTOR_HOTFIX_EVENT_TEMPLATE = ',"hotfix":""}' + + + + +SYSCOLLECTOR_PACKAGE_DELTA_DATA_TEMPLATE = { + "architecture": "", + "checksum":"", + "description":"", + "format":"", + "groups":"editors", + 
"install_time":"", + "item_id":"", + "location":" ", + "multiarch": "null", + "name":"", + "priority":"optional", + "scan_time":"2023/12/19 15:32:25", + "size":"", + "source":"", + "vendor":"", + "version":"" +} + + +SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE = { + "checksum":"", + "hotfix":"", + "scan_time":"" +} + +SYSCOLLECTOR_OSINFO_DELTA_EVENT_TEMPLATE = { + "checksum":"1634140017886803554", + "architecture":"x86_64", + "hostname":"", + "os_codename":"focal", + "os_major":"20", + "os_minor":"04", + "os_name":"Ubuntu", + "os_platform":"ubuntu", + "os_patch": "6", + "os_release":"sp1", + "os_version":"20.04.6 LTS (Focal Fossa)", + "os_build":"4.18.0-305.12.1.el8_4.x86_64", + "release":"6.2.6-76060206-generic", + "scan_time":"2023/12/20 11:24:58", + "sysname":"Linux", + "version":"#202303130630~1689015125~22.04~ab2190e SMP PREEMPT_DYNAMIC" +} + +SYSCOLLECTOR_PROCESSSES_DELTA_EVENT_TEMPLATE = { + "argvs":"", + "euser":"", + "fgroup":"", + "name":"", + "nice":"", + "nlwp":"", + "pgrp":"", + "ppid":"", + "priority":"", + "processor":"", + "resident":"", + "rgroup":"", + "scan_time":"", + "session":"", + "sgroup":"", + "share":"", + "size":"", + "start_time":"", + "state":"S", + "stime":"", + "suser":"", + "tgid":"", + "tty":"", + "utime":"", + "vm_size":"", + "cmd":"", + "egroup":"", + "ruser":"" +} + +SYSCOLLECTOR_PORTS_DELTA_EVENT_TEMPLATE = { + "checksum":"", + "item_id":"", + "local_ip":"0.0.0.0", + "local_port":"", + "pid":"", + "process":"NULL", + "protocol":"tcp", + "remote_ip":"0.0.0.0", + "remote_port":"", + "rx_queue":"", + "scan_time":"", + "state":"listening", + "tx_queue":"" +} + + +SYSCOLLECTOR_HWINFO_DELTA_EVENT_TEMPLATE = { + "scan_time":"", + "board_serial":"", + "checksum":"", + "cpu_mhz":"", + "cpu_cores":"", + "cpu_name":"", + "ram_free":"", + "ram_total":"", + "ram_usage":"" +} + + +SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE = { + "adapter":"null", + "checksum":"", + "item_id":"", + "mac":"", + "mtu":"", + "name":"", + "rx_bytes":"", + 
"rx_dropped":"", + "rx_errors":"", + "rx_packets":"", + "scan_time":"", + "state":"", + "tx_bytes":"", + "tx_dropped":"", + "tx_errors":"", + "tx_packets":"", + "type":"" +} + + +SYSCOLLECTOR_NETWORK_NETADDR_DELTA_EVENT_TEMPLATE = { + "id": "", + "scan_id": "", + "proto": "", + "address": "192.168.1.87", + "netmask": "255.255.255.0", + "broadcast": "192.168.1.255", + "checksum": "" +} + + +SYSCOLLECTOR_NETWORK_NETPRO_DELTA_EVENT_TEMPLATE = { + "id": "", + "scan_id": "", + "iface": "eth0", + "type": "ipv4", + "gateway": "192.168.1.1", + "dhcp": "enabled", + "checksum": "", + "item_id": "" +} diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 38a4244bc2..55c8d0c9e8 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -119,7 +119,11 @@ def __init__(self, manager_address, cypher="aes", os=None, rootcheck_sample=None rootcheck_frequency=60.0, rcv_msg_limit=0, keepalive_frequency=10.0, sca_frequency=60, syscollector_frequency=60.0, syscollector_batch_size=10, hostinfo_eps=100, winevt_eps=100, fixed_message_size=None, registration_address=None, retry_enrollment=False, - logcollector_msg_number=None, custom_logcollector_message=''): + logcollector_msg_number=None, custom_logcollector_message='', + syscollector_message_type_list=['packages'], + # syscollector_message_type_list=['network', 'port', 'hotfix', 'process', 'packages', 'osinfo', 'hwinfo'], + syscollector_packages_vuln_content=None, + syscollector_message_old_format=False): self.id = id self.name = name self.key = key @@ -133,7 +137,12 @@ def __init__(self, manager_address, cypher="aes", os=None, rootcheck_sample=None self.os = os self.fim_eps = fim_eps self.fim_integrity_eps = fim_integrity_eps + self.syscollector_eps = syscollector_eps + self.syscollector_message_type_list = syscollector_message_type_list + self.syscollector_message_old_format = 
syscollector_message_old_format + self.syscollector_packages_vuln_content = syscollector_packages_vuln_content + self.rootcheck_eps = rootcheck_eps self.logcollector_eps = logcollector_eps self.winevt_eps = winevt_eps @@ -654,7 +663,10 @@ def init_sca(self): def init_syscollector(self): """Initialize syscollector module.""" if self.syscollector is None: - self.syscollector = GeneratorSyscollector(self.name, self.syscollector_batch_size) + self.syscollector = GeneratorSyscollector(self.name, self.syscollector_message_type_list, + self.syscollector_message_old_format, + self.syscollector_batch_size, + self.syscollector_packages_vuln_content) def init_rootcheck(self): """Initialize rootcheck module.""" @@ -747,11 +759,35 @@ class GeneratorSyscollector: agent_name (str): Name of the agent. batch_size (int): Number of messages of the same type """ - def __init__(self, agent_name, batch_size): + + def __init__(self, agent_name, event_types_list, old_format, batch_size, syscollector_packages_vuln_content): self.current_batch_events = -1 self.current_batch_events_size = 0 - self.list_events = ['network', 'port', 'hotfix', - 'process', 'program', 'OS', 'hardware'] + self.list_events = event_types_list + self.syscollector_event_type_mapping = { + 'packages': 'dbsync_packages', + 'hotfix': 'dbsync_hotfix', + 'hwinfo': 'dbsync_hwinfo', + 'port': 'dbsync_ports', + 'osinfo': 'dbsync_osinfo', + 'network': 'dbsync_network_iface', + 'process': 'dbsync_processes' + } + self.syscollector_packages_vuln_content = syscollector_packages_vuln_content + + + + self.default_package_data = { + '': 'A low-level cryptographic library', + '': 'x86_64', + '': 'deb', + '': 'nettle', + '': 'vim', + '': 'Ubuntu Developers ', + '': '2.7.1-9.el7_9' + } + + self.old_format = old_format self.agent_name = agent_name self.batch_size = batch_size self.syscollector_tag = 'syscollector' @@ -765,36 +801,67 @@ def format_event(self, message_type): Returns: str: the generated syscollector event message. 
""" - message = syscollector.SYSCOLLECTOR_HEADER - if message_type == 'network': - message += syscollector.SYSCOLLECTOR_NETWORK_EVENT_TEMPLATE - elif message_type == 'process': - message += syscollector.SYSCOLLECTOR_PROCESS_EVENT_TEMPLATE - elif message_type == 'port': - message += syscollector.SYSCOLLECTOR_PORT_EVENT_TEMPLATE - elif message_type == 'program': - message += syscollector.SYSCOLLECTOR_PACKAGES_EVENT_TEMPLATE - elif message_type == 'OS': - message += syscollector.SYSCOLLECTOR_OS_EVENT_TEMPLATE - elif message_type == 'hardware': - message += syscollector.SYSCOLLECTOR_HARDWARE_EVENT_TEMPLATE - elif message_type == 'hotfix': - message += syscollector.SYSCOLLECTOR_HOTFIX_EVENT_TEMPLATE - elif 'end' in message_type: - message += '}' + if self.old_format: + message = syscollector.LEGACY_SYSCOLLECTOR_HEADER + if message_type == 'network': + message += syscollector.LEGACY_SYSCOLLECTOR_NETWORK_EVENT_TEMPLATE + elif message_type == 'process': + message += syscollector.LEGACY_SYSCOLLECTOR_PROCESS_EVENT_TEMPLATE + elif message_type == 'port': + message += syscollector.LEGACY_SYSCOLLECTOR_PORT_EVENT_TEMPLATE + elif message_type == 'packages': + message += syscollector.LEGACY_SYSCOLLECTOR_PACKAGES_EVENT_TEMPLATE + elif message_type == 'osinfo': + message += syscollector.LEGACY_SYSCOLLECTOR_OS_EVENT_TEMPLATE + elif message_type == 'hwinfo': + message += syscollector.LEGACY_SYSCOLLECTOR_HARDWARE_EVENT_TEMPLATE + elif message_type == 'hotfix': + message += syscollector.LEGACY_SYSCOLLECTOR_HOTFIX_EVENT_TEMPLATE + elif 'end' in message_type: + message += '}' + else: + message = {'type': self.syscollector_event_type_mapping[message_type]} + operation = 'INSERTED' if (message_type == 'osinfo' or message_type == 'packages') else 'MODIFIED' + + data_dict = {} + + if message_type == 'network': + data_dict = syscollector.SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE + elif message_type == 'process': + data_dict = syscollector.SYSCOLLECTOR_PROCESSSES_DELTA_EVENT_TEMPLATE + 
elif message_type == 'port': + data_dict = syscollector.SYSCOLLECTOR_PORTS_DELTA_EVENT_TEMPLATE + elif message_type == 'packages': + data_dict = syscollector.SYSCOLLECTOR_PACKAGE_DELTA_DATA_TEMPLATE + elif message_type == 'osinfo': + data_dict = syscollector.SYSCOLLECTOR_OSINFO_DELTA_EVENT_TEMPLATE + elif message_type == 'hwinfo': + data_dict = syscollector.SYSCOLLECTOR_HWINFO_DELTA_EVENT_TEMPLATE + elif message_type == 'hotfix': + data_dict = syscollector.SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE + + message['data'] = data_dict + message['operation'] = operation + + message = str(rf'{message}') today = date.today() timestamp = today.strftime("%Y/%m/%d %H:%M:%S") - fields_to_replace = [ + generics_fields_to_replace = [ ('', self.agent_name), ('', f"{self.current_id}"), ('', get_random_string(10)), ('', timestamp), ('', message_type) ] - for variable, value in fields_to_replace: + for variable, value in generics_fields_to_replace: message = message.replace(variable, value) + if message_type == 'packages': + if not self.syscollector_packages_vuln_content: + for package_key, package_value in self.default_package_data.items(): + message = message.replace(package_key, package_value) + self.current_id += 1 message = f"{self.syscollector_mq}:{self.syscollector_tag}:{message}" @@ -813,13 +880,18 @@ def generate_event(self): self.current_batch_events_size = self.batch_size if self.list_events[self.current_batch_events] not in ['network', 'port', 'process'] \ - or self.current_batch_events_size > 1: + or self.current_batch_events_size > 1 or not self.old_format: event = self.list_events[self.current_batch_events] else: event = self.list_events[self.current_batch_events] + '_end' self.current_batch_events_size = self.current_batch_events_size - 1 - return self.format_event(event) + + + event_final = self.format_event(event) + print(event_final) + + return event_final class SCA: From c0323df1f1a107d3ed87db98c2ef7ce1b0524909 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 9 Jan 2024 16:22:09 +0000 Subject: [PATCH 079/174] fix: syscollector messages structure AS --- .../wazuh_testing/data/syscollector.py | 4 +- .../wazuh_testing/tools/agent_simulator.py | 44 ++++++++++++------- 2 files changed, 30 insertions(+), 18 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/data/syscollector.py b/deps/wazuh_testing/wazuh_testing/data/syscollector.py index 086056bf8e..b5b03ca686 100644 --- a/deps/wazuh_testing/wazuh_testing/data/syscollector.py +++ b/deps/wazuh_testing/wazuh_testing/data/syscollector.py @@ -159,7 +159,7 @@ SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE = { - "adapter":"null", + "adapter": None, "checksum":"", "item_id":"", "mac":"", @@ -200,4 +200,4 @@ "dhcp": "enabled", "checksum": "", "item_id": "" -} +} \ No newline at end of file diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 55c8d0c9e8..5d409366a1 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -27,6 +27,7 @@ from struct import pack from sys import getsizeof from time import mktime, localtime, sleep, time +import re import wazuh_testing.data.syscollector as syscollector import wazuh_testing.data.winevt as winevt @@ -780,7 +781,7 @@ def __init__(self, agent_name, event_types_list, old_format, batch_size, syscoll self.default_package_data = { '': 'A low-level cryptographic library', '': 'x86_64', - '': 'deb', + '': 'rpm', '': 'nettle', '': 'vim', '': 'Ubuntu Developers ', @@ -820,30 +821,32 @@ def format_event(self, message_type): elif 'end' in message_type: message += '}' else: - message = {'type': self.syscollector_event_type_mapping[message_type]} + message_event_type = self.syscollector_event_type_mapping[message_type] operation = 'INSERTED' if (message_type == 'osinfo' or message_type == 'packages') else 'MODIFIED' + 
message_operation = operation - data_dict = {} + message_data = {} if message_type == 'network': - data_dict = syscollector.SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE + message_data = syscollector.SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE elif message_type == 'process': - data_dict = syscollector.SYSCOLLECTOR_PROCESSSES_DELTA_EVENT_TEMPLATE + message_data = syscollector.SYSCOLLECTOR_PROCESSSES_DELTA_EVENT_TEMPLATE elif message_type == 'port': - data_dict = syscollector.SYSCOLLECTOR_PORTS_DELTA_EVENT_TEMPLATE + message_data = syscollector.SYSCOLLECTOR_PORTS_DELTA_EVENT_TEMPLATE elif message_type == 'packages': - data_dict = syscollector.SYSCOLLECTOR_PACKAGE_DELTA_DATA_TEMPLATE + message_data = syscollector.SYSCOLLECTOR_PACKAGE_DELTA_DATA_TEMPLATE elif message_type == 'osinfo': - data_dict = syscollector.SYSCOLLECTOR_OSINFO_DELTA_EVENT_TEMPLATE + message_data = syscollector.SYSCOLLECTOR_OSINFO_DELTA_EVENT_TEMPLATE elif message_type == 'hwinfo': - data_dict = syscollector.SYSCOLLECTOR_HWINFO_DELTA_EVENT_TEMPLATE + message_data = syscollector.SYSCOLLECTOR_HWINFO_DELTA_EVENT_TEMPLATE elif message_type == 'hotfix': - data_dict = syscollector.SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE + message_data = syscollector.SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE - message['data'] = data_dict - message['operation'] = operation - - message = str(rf'{message}') + message = '{"type": "%s", "data": %s, "operation": "%s"}' % ( + message_event_type, + re.sub(r'\s', '', json.dumps(message_data)), + message_operation + ) today = date.today() timestamp = today.strftime("%Y/%m/%d %H:%M:%S") @@ -875,6 +878,7 @@ def generate_event(self): Returns: str: generated event with the desired format for syscollector """ + print("Generating event") if self.current_batch_events_size == 0: self.current_batch_events = (self.current_batch_events + 1) % len(self.list_events) self.current_batch_events_size = self.batch_size @@ -1656,11 +1660,13 @@ def keep_alive(self): def run_module(self, module): 
"""Send a module message from the agent to the manager. Args: - module (str): Module name + module (str): Module name """ module_info = self.agent.modules[module] eps = module_info['eps'] if 'eps' in module_info else 1 frequency = module_info["frequency"] if 'frequency' in module_info else 1 + print(f"Defining frequency as {frequency}") + sleep(10) start_time = time() @@ -1700,6 +1706,7 @@ def run_module(self, module): while self.stop_thread == 0: sent_messages = 0 while sent_messages < batch_messages: + print("Send event") event_msg = module_event_generator() if self.agent.fixed_message_size is not None: event_msg_size = getsizeof(event_msg) @@ -1718,9 +1725,14 @@ def run_module(self, module): self.totalMessages += 1 sent_messages += 1 if self.totalMessages % eps == 0: + print("Sleeping") + print(self.totalMessages) + print(1.0 - ((time() - start_time) % 1.0)) sleep(1.0 - ((time() - start_time) % 1.0)) - if frequency > 1: + print("Sleeping freq") + print(frequency) + print(frequency - ((time() - start_time) % frequency)) sleep(frequency - ((time() - start_time) % frequency)) def run(self): From bf0eb17e99a355f2c2f11d7a9be07338b68aeb59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 10 Jan 2024 15:54:11 +0000 Subject: [PATCH 080/174] refac: GeneratorSyscollector --- .../wazuh_testing/data/syscollector.py | 215 +++++++++--------- .../modules/syscollector/__init__.py | 1 + .../wazuh_testing/scripts/simulate_agents.py | 62 +++-- .../wazuh_testing/tools/agent_simulator.py | 128 ++++++----- 4 files changed, 228 insertions(+), 178 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/data/syscollector.py b/deps/wazuh_testing/wazuh_testing/data/syscollector.py index b5b03ca686..6fdfd5476e 100644 --- a/deps/wazuh_testing/wazuh_testing/data/syscollector.py +++ b/deps/wazuh_testing/wazuh_testing/data/syscollector.py @@ -1,3 +1,5 @@ +# Legacy Syscollector Templates + LEGACY_SYSCOLLECTOR_HEADER = '{"type":"",' \ 
'"ID":,"timestamp":""' @@ -40,142 +42,141 @@ '"netmask":["255.255.255.0"],"broadcast":["10.0.2.255"],' \ '"metric":100,"gateway":"10.0.2.2","DHCP":"enabled"}}}' -LEGACY_SYSCOLLECTOR_PORT_EVENT_TEMPLATE = ',"port":{"protocol":"tcp","local_ip":"0.0.0.0",' \ +LEGACY_SYSCOLLECTOR_PORTS_EVENT_TEMPLATE = ',"port":{"protocol":"tcp","local_ip":"0.0.0.0",' \ '"local_port":,"remote_ip":"0.0.0.0",' \ '"remote_port":0,"tx_queue":0,' \ '"rx_queue":0,"inode":22273,"state":"listening"}}' -1 -LEGACY_SYSCOLLECTOR_HOTFIX_EVENT_TEMPLATE = ',"hotfix":""}' - +LEGACY_SYSCOLLECTOR_HOTFIX_EVENT_TEMPLATE = ',"hotfix":""}' +# Delta Templates SYSCOLLECTOR_PACKAGE_DELTA_DATA_TEMPLATE = { - "architecture": "", - "checksum":"", - "description":"", - "format":"", - "groups":"editors", - "install_time":"", - "item_id":"", - "location":" ", - "multiarch": "null", - "name":"", - "priority":"optional", - "scan_time":"2023/12/19 15:32:25", - "size":"", - "source":"", - "vendor":"", - "version":"" + "architecture": "", + "checksum": "", + "description": "", + "format": "", + "groups": "editors", + "install_time": "", + "item_id": "", + "location": " ", + "multiarch": "null", + "name": "", + "priority": "optional", + "scan_time": "2023/12/19 15:32:25", + "size": "", + "source": "", + "vendor": "", + "version": "" } SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE = { - "checksum":"", - "hotfix":"", - "scan_time":"" + "checksum": "", + "hotfix": "", + "scan_time": "" } SYSCOLLECTOR_OSINFO_DELTA_EVENT_TEMPLATE = { - "checksum":"1634140017886803554", - "architecture":"x86_64", - "hostname":"", - "os_codename":"focal", - "os_major":"20", - "os_minor":"04", - "os_name":"Ubuntu", - "os_platform":"ubuntu", - "os_patch": "6", - "os_release":"sp1", - "os_version":"20.04.6 LTS (Focal Fossa)", - "os_build":"4.18.0-305.12.1.el8_4.x86_64", - "release":"6.2.6-76060206-generic", - "scan_time":"2023/12/20 11:24:58", - "sysname":"Linux", - "version":"#202303130630~1689015125~22.04~ab2190e SMP PREEMPT_DYNAMIC" + "checksum": 
"1634140017886803554", + "architecture": "x86_64", + "hostname": "", + "os_codename": "focal", + "os_major": "20", + "os_minor": "04", + "os_name": "Ubuntu", + "os_platform": "ubuntu", + "os_patch": "6", + "os_release": "sp1", + "os_version": "20.04.6 LTS (Focal Fossa)", + "os_build": "4.18.0-305.12.1.el8_4.x86_64", + "release": "6.2.6-76060206-generic", + "scan_time": "2023/12/20 11:24:58", + "sysname": "Linux", + "version": "#202303130630~1689015125~22.04~ab2190e SMP PREEMPT_DYNAMIC" } SYSCOLLECTOR_PROCESSSES_DELTA_EVENT_TEMPLATE = { - "argvs":"", - "euser":"", - "fgroup":"", - "name":"", - "nice":"", - "nlwp":"", - "pgrp":"", - "ppid":"", - "priority":"", - "processor":"", - "resident":"", - "rgroup":"", - "scan_time":"", - "session":"", - "sgroup":"", - "share":"", - "size":"", - "start_time":"", - "state":"S", - "stime":"", - "suser":"", - "tgid":"", - "tty":"", - "utime":"", - "vm_size":"", - "cmd":"", - "egroup":"", - "ruser":"" + "argvs": "", + "euser": "", + "fgroup": "", + "name": "", + "nice": "", + "nlwp": "", + "pgrp": "", + "ppid": "", + "priority": "", + "processor": "", + "resident": "", + "rgroup": "", + "scan_time": "", + "session": "", + "sgroup": "", + "share": "", + "size": "", + "start_time": "", + "state": "S", + "stime": "", + "suser": "", + "tgid": "", + "tty": "", + "utime": "", + "vm_size": "", + "cmd": "", + "egroup": "", + "ruser": "" } SYSCOLLECTOR_PORTS_DELTA_EVENT_TEMPLATE = { - "checksum":"", - "item_id":"", - "local_ip":"0.0.0.0", - "local_port":"", - "pid":"", - "process":"NULL", - "protocol":"tcp", - "remote_ip":"0.0.0.0", - "remote_port":"", - "rx_queue":"", - "scan_time":"", - "state":"listening", - "tx_queue":"" + "checksum": "", + "item_id": "", + "local_ip": "0.0.0.0", + "local_port": "", + "pid": "", + "process": "NULL", + "protocol": "tcp", + "remote_ip": "0.0.0.0", + "remote_port": "", + "rx_queue": "", + "scan_time": "", + "state": "listening", + "tx_queue": "" } SYSCOLLECTOR_HWINFO_DELTA_EVENT_TEMPLATE = { - 
"scan_time":"", - "board_serial":"", - "checksum":"", - "cpu_mhz":"", - "cpu_cores":"", - "cpu_name":"", - "ram_free":"", - "ram_total":"", - "ram_usage":"" + "scan_time": "", + "board_serial": "", + "checksum": "", + "cpu_mhz": "", + "cpu_cores": "", + "cpu_name": "", + "ram_free": "", + "ram_total": "", + "ram_usage": "" } SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE = { "adapter": None, - "checksum":"", - "item_id":"", - "mac":"", - "mtu":"", - "name":"", - "rx_bytes":"", - "rx_dropped":"", - "rx_errors":"", - "rx_packets":"", - "scan_time":"", - "state":"", - "tx_bytes":"", - "tx_dropped":"", - "tx_errors":"", - "tx_packets":"", - "type":"" + "checksum": "", + "item_id": "", + "mac": "", + "mtu": "", + "name": "", + "rx_bytes": "", + "rx_dropped": "", + "rx_errors": "", + "rx_packets": "", + "scan_time": "", + "state": "", + "tx_bytes": "", + "tx_dropped": "", + "tx_errors": "", + "tx_packets": "", + "type": "" } @@ -200,4 +201,4 @@ "dhcp": "enabled", "checksum": "", "item_id": "" -} \ No newline at end of file +} diff --git a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py index e69de29bb2..be43720674 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py @@ -0,0 +1 @@ +SYSCOLLECTOR_DELTA_EVENT_TYPES = ['packages', 'hotfix', 'hwinfo', 'ports', 'osinfo', 'network', 'process'] diff --git a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py index 846b25405e..198637027e 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py @@ -1,14 +1,15 @@ import argparse import logging import os +import wazuh_testing.tools.agent_simulator as ag +from wazuh_testing.modules.syscollector import SYSCOLLECTOR_DELTA_EVENT_TYPES + +from wazuh_testing 
import TCP from multiprocessing import Process from time import sleep -import wazuh_testing.tools.agent_simulator as ag -from wazuh_testing import TCP logging.basicConfig(level=logging.INFO) - logger = logging.getLogger(f"P{os.getpid()}") @@ -51,6 +52,12 @@ def process_script_parameters(args): if not args.version.startswith('v'): args.version = 'v' + args.version + if args.syscollector_event_types: + args.syscollector_event_types = args.syscollector_event_types.split(' ') + if any(event_type not in SYSCOLLECTOR_DELTA_EVENT_TYPES for event_type in args.syscollector_event_types): + print(args.syscollector_event_types) + raise ValueError(f'Invalid syscollector event type. Valid values are: {SYSCOLLECTOR_DELTA_EVENT_TYPES}') + def set_agent_modules_and_eps(agent, active_modules, modules_eps): """Set active modules and EPS to an agent. @@ -86,6 +93,18 @@ def set_agent_modules_and_eps(agent, active_modules, modules_eps): logger.info(agent.modules) +def create_agent(args, custom_labels): + agent = ag.Agent(manager_address=args.manager_address, os=args.os, + registration_address=args.manager_registration_address, + version=args.version, fixed_message_size=args.fixed_message_size, labels=custom_labels, + logcollector_msg_number=args.enable_logcollector_message_number, + custom_logcollector_message=args.custom_logcollector_message, + syscollector_frequency=args.syscollector_frequency, + syscollector_event_types=args.syscollector_event_types, + syscollector_legacy_messages=args.syscollector_legacy_messages) + return agent + + def create_agents(args): """Create a list of agents according to script parameters like the mode, EPS... 
Args: @@ -110,21 +129,14 @@ def create_agents(args): logger.info(f"Agents-EPS distributon = {distribution_list}") for item in distribution_list: # item[0] = modules - item[1] = eps - agent = ag.Agent(manager_address=args.manager_address, os=args.os, - registration_address=args.manager_registration_address, - version=args.version, fixed_message_size=args.fixed_message_size, labels=custom_labels, - logcollector_msg_number=args.enable_logcollector_message_number, - custom_logcollector_message=args.custom_logcollector_message) + agent = create_agent(args, custom_labels) set_agent_modules_and_eps(agent, item[0].split(' ') + ['keepalive', 'receive_messages'], item[1].split(' ') + ['0', '0']) agents.append(agent) else: for _ in range(args.agents_number): - agent = ag.Agent(manager_address=args.manager_address, os=args.os, - registration_address=args.manager_registration_address, - version=args.version, fixed_message_size=args.fixed_message_size, labels=custom_labels, - logcollector_msg_number=args.enable_logcollector_message_number, - custom_logcollector_message=args.custom_logcollector_message) + agent = create_agent(args, custom_labels) + set_agent_modules_and_eps(agent, args.modules, args.modules_eps) agents.append(agent) @@ -344,6 +356,30 @@ def main(): help='Custom logcollector message', required=False, default='', dest='custom_logcollector_message') + arg_parser.add_argument('--syscollector-frequency', + metavar='', + type=int, + help='Frequency of Syscollector scans. Set to 1 for constant message sending.', + required=False, + default=60, + dest='syscollector_frequency') + + arg_parser.add_argument('--syscollector-event-types', + metavar='', + type=str, + help='''Space-separated list of event types for syscollector messages. + Default is "packages".''', + required=False, + default='packages', + dest='syscollector_event_types') + + arg_parser.add_argument('--syscollector-legacy-messages', + help='Enable prior 4.2 agents syscollector format. 
Default is False.', + required=False, + action='store_true', + default=False, + dest='syscollector_legacy_messages') + args = arg_parser.parse_args() process_script_parameters(args) diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 5d409366a1..3dafa241d2 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -121,10 +121,9 @@ def __init__(self, manager_address, cypher="aes", os=None, rootcheck_sample=None syscollector_frequency=60.0, syscollector_batch_size=10, hostinfo_eps=100, winevt_eps=100, fixed_message_size=None, registration_address=None, retry_enrollment=False, logcollector_msg_number=None, custom_logcollector_message='', - syscollector_message_type_list=['packages'], - # syscollector_message_type_list=['network', 'port', 'hotfix', 'process', 'packages', 'osinfo', 'hwinfo'], + syscollector_event_types=['network', 'port', 'hotfix', 'process', 'packages', 'osinfo', 'hwinfo'], syscollector_packages_vuln_content=None, - syscollector_message_old_format=False): + syscollector_legacy_messages=False): self.id = id self.name = name self.key = key @@ -140,8 +139,8 @@ def __init__(self, manager_address, cypher="aes", os=None, rootcheck_sample=None self.fim_integrity_eps = fim_integrity_eps self.syscollector_eps = syscollector_eps - self.syscollector_message_type_list = syscollector_message_type_list - self.syscollector_message_old_format = syscollector_message_old_format + self.syscollector_event_types = syscollector_event_types + self.syscollector_legacy_messages = syscollector_legacy_messages self.syscollector_packages_vuln_content = syscollector_packages_vuln_content self.rootcheck_eps = rootcheck_eps @@ -664,8 +663,8 @@ def init_sca(self): def init_syscollector(self): """Initialize syscollector module.""" if self.syscollector is None: - self.syscollector = GeneratorSyscollector(self.name, 
self.syscollector_message_type_list, - self.syscollector_message_old_format, + self.syscollector = GeneratorSyscollector(self.name, self.syscollector_event_types, + self.syscollector_legacy_messages, self.syscollector_batch_size, self.syscollector_packages_vuln_content) @@ -769,15 +768,13 @@ def __init__(self, agent_name, event_types_list, old_format, batch_size, syscoll 'packages': 'dbsync_packages', 'hotfix': 'dbsync_hotfix', 'hwinfo': 'dbsync_hwinfo', - 'port': 'dbsync_ports', + 'ports': 'dbsync_ports', 'osinfo': 'dbsync_osinfo', 'network': 'dbsync_network_iface', 'process': 'dbsync_processes' } self.syscollector_packages_vuln_content = syscollector_packages_vuln_content - - self.default_package_data = { '': 'A low-level cryptographic library', '': 'x86_64', @@ -795,52 +792,60 @@ def __init__(self, agent_name, event_types_list, old_format, batch_size, syscoll self.syscollector_mq = 'd' self.current_id = 1 - def format_event(self, message_type): - """Format syscollector message of the specified type. + def get_event_template_legacy(self, message_type): + """Get syscollector legacy message of the specified type. + Args: + message_type (str): Syscollector event type. + Return: + str: Syscollector legacy event message. 
+ """ + message = syscollector.LEGACY_SYSCOLLECTOR_HEADER + if message_type == 'network': + message += syscollector.LEGACY_SYSCOLLECTOR_NETWORK_EVENT_TEMPLATE + elif message_type == 'process': + message += syscollector.LEGACY_SYSCOLLECTOR_PROCESS_EVENT_TEMPLATE + elif message_type == 'ports': + message += syscollector.LEGACY_SYSCOLLECTOR_PORTS_EVENT_TEMPLATE + elif message_type == 'packages': + message += syscollector.LEGACY_SYSCOLLECTOR_PACKAGES_EVENT_TEMPLATE + elif message_type == 'osinfo': + message += syscollector.LEGACY_SYSCOLLECTOR_OS_EVENT_TEMPLATE + elif message_type == 'hwinfo': + message += syscollector.LEGACY_SYSCOLLECTOR_HARDWARE_EVENT_TEMPLATE + elif message_type == 'hotfix': + message += syscollector.LEGACY_SYSCOLLECTOR_HOTFIX_EVENT_TEMPLATE + elif 'end' in message_type: + message += '}' + + return message + + def get_event_template(self, message_type): + """Get syscollector message of the specified type. Args: message_type (str): Syscollector event type. Returns: - str: the generated syscollector event message. + str: Syscollector event message. 
""" - if self.old_format: - message = syscollector.LEGACY_SYSCOLLECTOR_HEADER - if message_type == 'network': - message += syscollector.LEGACY_SYSCOLLECTOR_NETWORK_EVENT_TEMPLATE - elif message_type == 'process': - message += syscollector.LEGACY_SYSCOLLECTOR_PROCESS_EVENT_TEMPLATE - elif message_type == 'port': - message += syscollector.LEGACY_SYSCOLLECTOR_PORT_EVENT_TEMPLATE - elif message_type == 'packages': - message += syscollector.LEGACY_SYSCOLLECTOR_PACKAGES_EVENT_TEMPLATE - elif message_type == 'osinfo': - message += syscollector.LEGACY_SYSCOLLECTOR_OS_EVENT_TEMPLATE - elif message_type == 'hwinfo': - message += syscollector.LEGACY_SYSCOLLECTOR_HARDWARE_EVENT_TEMPLATE - elif message_type == 'hotfix': - message += syscollector.LEGACY_SYSCOLLECTOR_HOTFIX_EVENT_TEMPLATE - elif 'end' in message_type: - message += '}' - else: - message_event_type = self.syscollector_event_type_mapping[message_type] - operation = 'INSERTED' if (message_type == 'osinfo' or message_type == 'packages') else 'MODIFIED' - message_operation = operation - - message_data = {} - - if message_type == 'network': - message_data = syscollector.SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE - elif message_type == 'process': - message_data = syscollector.SYSCOLLECTOR_PROCESSSES_DELTA_EVENT_TEMPLATE - elif message_type == 'port': - message_data = syscollector.SYSCOLLECTOR_PORTS_DELTA_EVENT_TEMPLATE - elif message_type == 'packages': - message_data = syscollector.SYSCOLLECTOR_PACKAGE_DELTA_DATA_TEMPLATE - elif message_type == 'osinfo': - message_data = syscollector.SYSCOLLECTOR_OSINFO_DELTA_EVENT_TEMPLATE - elif message_type == 'hwinfo': - message_data = syscollector.SYSCOLLECTOR_HWINFO_DELTA_EVENT_TEMPLATE - elif message_type == 'hotfix': - message_data = syscollector.SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE + message_event_type = self.syscollector_event_type_mapping[message_type] + operation = 'INSERTED' if (message_type == 'osinfo' or message_type == 'packages') else 'MODIFIED' + 
message_operation = operation + + message_data = {} + + if message_type == 'network': + message_data = syscollector.SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE + elif message_type == 'process': + message_data = syscollector.SYSCOLLECTOR_PROCESSSES_DELTA_EVENT_TEMPLATE + elif message_type == 'ports': + message_data = syscollector.SYSCOLLECTOR_PORTS_DELTA_EVENT_TEMPLATE + elif message_type == 'packages': + message_data = syscollector.SYSCOLLECTOR_PACKAGE_DELTA_DATA_TEMPLATE + elif message_type == 'osinfo': + message_data = syscollector.SYSCOLLECTOR_OSINFO_DELTA_EVENT_TEMPLATE + elif message_type == 'hwinfo': + message_data = syscollector.SYSCOLLECTOR_HWINFO_DELTA_EVENT_TEMPLATE + elif message_type == 'hotfix': + message_data = syscollector.SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE message = '{"type": "%s", "data": %s, "operation": "%s"}' % ( message_event_type, @@ -848,8 +853,12 @@ def format_event(self, message_type): message_operation ) + return message + + def format_event_template(self, template, message_type=None): today = date.today() timestamp = today.strftime("%Y/%m/%d %H:%M:%S") + message = template generics_fields_to_replace = [ ('', self.agent_name), ('', f"{self.current_id}"), @@ -865,11 +874,9 @@ def format_event(self, message_type): for package_key, package_value in self.default_package_data.items(): message = message.replace(package_key, package_value) - self.current_id += 1 + final_mesage = f"{self.syscollector_mq}:{self.syscollector_tag}:{message}" - message = f"{self.syscollector_mq}:{self.syscollector_tag}:{message}" - - return message + return final_mesage def generate_event(self): """Generate syscollector event. 
@@ -878,7 +885,6 @@ def generate_event(self): Returns: str: generated event with the desired format for syscollector """ - print("Generating event") if self.current_batch_events_size == 0: self.current_batch_events = (self.current_batch_events + 1) % len(self.list_events) self.current_batch_events_size = self.batch_size @@ -891,10 +897,16 @@ def generate_event(self): self.current_batch_events_size = self.current_batch_events_size - 1 + if self.old_format: + event_template = self.get_event_template_legacy(self.list_events[self.current_batch_events]) + else: + event_template = self.get_event_template(self.list_events[self.current_batch_events]) - event_final = self.format_event(event) + event_final = self.format_event_template(event_template, event) print(event_final) + self.current_id += 1 + return event_final From c6cf331f53027bb0099410ed88373cf83cac435e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 10 Jan 2024 16:07:13 +0000 Subject: [PATCH 081/174] refac: remove debugging messages --- .../wazuh_testing/tools/agent_simulator.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 3dafa241d2..19719f3bdc 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -903,7 +903,6 @@ def generate_event(self): event_template = self.get_event_template(self.list_events[self.current_batch_events]) event_final = self.format_event_template(event_template, event) - print(event_final) self.current_id += 1 @@ -1677,8 +1676,6 @@ def run_module(self, module): module_info = self.agent.modules[module] eps = module_info['eps'] if 'eps' in module_info else 1 frequency = module_info["frequency"] if 'frequency' in module_info else 1 - print(f"Defining frequency as {frequency}") - sleep(10) start_time = time() @@ -1718,7 +1715,6 @@ def 
run_module(self, module): while self.stop_thread == 0: sent_messages = 0 while sent_messages < batch_messages: - print("Send event") event_msg = module_event_generator() if self.agent.fixed_message_size is not None: event_msg_size = getsizeof(event_msg) @@ -1737,14 +1733,8 @@ def run_module(self, module): self.totalMessages += 1 sent_messages += 1 if self.totalMessages % eps == 0: - print("Sleeping") - print(self.totalMessages) - print(1.0 - ((time() - start_time) % 1.0)) sleep(1.0 - ((time() - start_time) % 1.0)) if frequency > 1: - print("Sleeping freq") - print(frequency) - print(frequency - ((time() - start_time) % frequency)) sleep(frequency - ((time() - start_time) % frequency)) def run(self): From 081be17c7e300e0ce256a048330492680b50c78b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 10 Jan 2024 16:14:32 +0000 Subject: [PATCH 082/174] style: remove extra whitespaces in syscollector module --- deps/wazuh_testing/wazuh_testing/data/syscollector.py | 5 ----- deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py | 1 - deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py | 2 ++ 3 files changed, 2 insertions(+), 6 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/data/syscollector.py b/deps/wazuh_testing/wazuh_testing/data/syscollector.py index 6fdfd5476e..f98d5c36b6 100644 --- a/deps/wazuh_testing/wazuh_testing/data/syscollector.py +++ b/deps/wazuh_testing/wazuh_testing/data/syscollector.py @@ -71,7 +71,6 @@ "version": "" } - SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE = { "checksum": "", "hotfix": "", @@ -145,7 +144,6 @@ "tx_queue": "" } - SYSCOLLECTOR_HWINFO_DELTA_EVENT_TEMPLATE = { "scan_time": "", "board_serial": "", @@ -158,7 +156,6 @@ "ram_usage": "" } - SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE = { "adapter": None, "checksum": "", @@ -179,7 +176,6 @@ "type": "" } - SYSCOLLECTOR_NETWORK_NETADDR_DELTA_EVENT_TEMPLATE = { "id": "", "scan_id": "", @@ -191,7 +187,6 @@ "item_id": "" } - 
SYSCOLLECTOR_NETWORK_NETPRO_DELTA_EVENT_TEMPLATE = { "id": "", "scan_id": "", diff --git a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py index 198637027e..7cbfa549de 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py @@ -55,7 +55,6 @@ def process_script_parameters(args): if args.syscollector_event_types: args.syscollector_event_types = args.syscollector_event_types.split(' ') if any(event_type not in SYSCOLLECTOR_DELTA_EVENT_TYPES for event_type in args.syscollector_event_types): - print(args.syscollector_event_types) raise ValueError(f'Invalid syscollector event type. Valid values are: {SYSCOLLECTOR_DELTA_EVENT_TYPES}') diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 19719f3bdc..c8e0393f61 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -903,6 +903,7 @@ def generate_event(self): event_template = self.get_event_template(self.list_events[self.current_batch_events]) event_final = self.format_event_template(event_template, event) + logging.debug(f"Syscollector Event - {event_final}") self.current_id += 1 @@ -1734,6 +1735,7 @@ def run_module(self, module): sent_messages += 1 if self.totalMessages % eps == 0: sleep(1.0 - ((time() - start_time) % 1.0)) + if frequency > 1: sleep(frequency - ((time() - start_time) % frequency)) From 06b8de3297c8048c458a53160fbd2be7af5e8142 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 10 Jan 2024 17:10:31 +0000 Subject: [PATCH 083/174] feat: include debug option in simulate_agents --- .../wazuh_testing/scripts/simulate_agents.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py 
b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py index 7cbfa549de..4a5f9f3af2 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py @@ -57,6 +57,9 @@ def process_script_parameters(args): if any(event_type not in SYSCOLLECTOR_DELTA_EVENT_TYPES for event_type in args.syscollector_event_types): raise ValueError(f'Invalid syscollector event type. Valid values are: {SYSCOLLECTOR_DELTA_EVENT_TYPES}') + if args.debug: + logging.getLogger().setLevel(logging.DEBUG) + def set_agent_modules_and_eps(agent, active_modules, modules_eps): """Set active modules and EPS to an agent. @@ -341,6 +344,13 @@ def main(): help='Disable keepalive module', required=False, default=False, dest='disable_keepalive') + arg_parser.add_argument('--debug', + help='Enable debug mode', + required=False, + action='store_true', + default=False, + dest='debug') + arg_parser.add_argument('-d', '--disable-receive', metavar='', type=bool, help='Disable receive message module', required=False, default=False, dest='disable_receive') From 90d09c436c6369795d56298b80ab955c87d4f42c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 10 Jan 2024 17:21:37 +0000 Subject: [PATCH 084/174] docs: include available syscollector types --- deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py index 4a5f9f3af2..81252bc682 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py @@ -377,7 +377,8 @@ def main(): metavar='', type=str, help='''Space-separated list of event types for syscollector messages. - Default is "packages".''', + Default is "packages". 
Available types are "packages", "processes", "ports", + "network", "hotfix", "hwinfo", "osinfo"''', required=False, default='packages', dest='syscollector_event_types') From 591958f4b724bad7f8c68b087852dca20d473226 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 10 Jan 2024 17:24:32 +0000 Subject: [PATCH 085/174] style: fix imports in agent simulator --- deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index c8e0393f61..35874531fa 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -19,6 +19,7 @@ import ssl import threading import zlib +import re from datetime import date from itertools import cycle from random import randint, sample, choice, getrandbits @@ -27,7 +28,6 @@ from struct import pack from sys import getsizeof from time import mktime, localtime, sleep, time -import re import wazuh_testing.data.syscollector as syscollector import wazuh_testing.data.winevt as winevt From d92aff2e1d2a95c2d46a844ba7a7cac29fa7a0dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 11 Jan 2024 09:07:16 +0000 Subject: [PATCH 086/174] style: sort simulate agents imports --- deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py index 81252bc682..1736cbae0c 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py @@ -2,11 +2,11 @@ import logging import os import wazuh_testing.tools.agent_simulator as ag -from wazuh_testing.modules.syscollector import 
SYSCOLLECTOR_DELTA_EVENT_TYPES -from wazuh_testing import TCP from multiprocessing import Process from time import sleep +from wazuh_testing.modules.syscollector import SYSCOLLECTOR_DELTA_EVENT_TYPES +from wazuh_testing import TCP logging.basicConfig(level=logging.INFO) From a5fdbb89852bed915a8df50319b8a387d0508566 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 11 Jan 2024 09:07:47 +0000 Subject: [PATCH 087/174] style: refactor agent creation --- .../wazuh_testing/scripts/simulate_agents.py | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py index 1736cbae0c..74082371c2 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py @@ -96,14 +96,22 @@ def set_agent_modules_and_eps(agent, active_modules, modules_eps): def create_agent(args, custom_labels): - agent = ag.Agent(manager_address=args.manager_address, os=args.os, - registration_address=args.manager_registration_address, - version=args.version, fixed_message_size=args.fixed_message_size, labels=custom_labels, - logcollector_msg_number=args.enable_logcollector_message_number, - custom_logcollector_message=args.custom_logcollector_message, - syscollector_frequency=args.syscollector_frequency, - syscollector_event_types=args.syscollector_event_types, - syscollector_legacy_messages=args.syscollector_legacy_messages) + agent_args = { + 'manager_address': args.manager_address, + 'os': args.os, + 'registration_address': args.manager_registration_address, + 'version': args.version, + 'fixed_message_size': args.fixed_message_size, + 'labels': custom_labels, + 'logcollector_msg_number': args.enable_logcollector_message_number, + 'custom_logcollector_message': args.custom_logcollector_message, + 'syscollector_frequency': 
args.syscollector_frequency, + 'syscollector_event_types': args.syscollector_event_types, + 'syscollector_legacy_messages': args.syscollector_legacy_messages + } + + agent = ag.Agent(**agent_args) + return agent From 52b70beedbeeb978246cbf6fcaca5249424fa8cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 12 Jan 2024 17:20:34 +0000 Subject: [PATCH 088/174] feat: improve logging and reports in VD E2E tests --- .../wazuh_testing/end_to_end/indexer_api.py | 28 +- .../wazuh_testing/end_to_end/monitoring.py | 33 +- .../end_to_end/vulnerability_detector.py | 53 +-- .../wazuh_testing/end_to_end/waiters.py | 4 +- .../modules/syscollector/__init__.py | 2 +- .../wazuh_testing/tools/system.py | 68 +++- tests/end_to_end/conftest.py | 2 + tests/end_to_end/pytest.ini | 5 + .../configurations/agent.yaml | 2 +- .../test_vulnerability_detector/conftest.py | 184 ++++++++++ .../test_vulnerability_detector.py | 323 ++++++++++++++---- 11 files changed, 603 insertions(+), 101 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 6af4a2da3a..877ba8819c 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -35,6 +35,7 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' Returns: str: The response text from the indexer API. 
""" + print('Getting values from the Indexer API') url = f"https://{host_manager.get_master_ip()}:9200/{index}/_search" headers = { @@ -46,20 +47,35 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' "match_all": {} } } + if greater_than_timestamp: - data['query'].update( + query = { + "bool": { + "must": [ + {"match_all": {}}, + {"range": {"@timestamp": {"gte": f"{greater_than_timestamp}"}}} + ] + } + } + + sort = [ { - 'range': { - "@timestamp": { - "gte": greater_than_timestamp - } + "@timestamp": { + "order": "desc" } - }) + } + ] + + data['query'] = query + data['sort'] = sort + param = { 'pretty': 'true', 'size': 10000, } + print(data) + response = requests.get(url=url, params=param, verify=False, auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password']), headers=headers, json=data) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index f82260b32d..1cbed80e1d 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -21,13 +21,14 @@ from time import sleep from typing import Dict, List from multiprocessing.pool import ThreadPool +from concurrent.futures import ThreadPoolExecutor, as_completed from wazuh_testing.end_to_end import logs_filepath_os from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.tools.system import HostManager -def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict) -> None: +def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict, ignore_error=False) -> Dict: """ Monitor events on multiple hosts concurrently. @@ -35,7 +36,8 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict host_manager: An instance of the HostManager class containing information about hosts. 
monitoring_data: A dictionary containing monitoring data for each host. """ - def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 5): + def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 5, + ignore_error=False): """ Monitor the specified elements on a host. @@ -47,6 +49,7 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: Raises: TimeoutError: If no match is found within the specified timeout. """ + elements_not_found = [] for element in monitoring_elements: regex, timeout, monitoring_file = element['regex'], element['timeout'], element['file'] @@ -63,11 +66,29 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: current_timeout += 5 if not regex_match: - raise TimeoutError("No match found within the specified timeout.") + elements_not_found.append(element) + if not ignore_error: + raise TimeoutError(f"Element not found: {element}") - with ThreadPool() as pool: - # Use the pool to map the function to the list of hosts - pool.starmap(monitoring_event, [(host_manager, host, data) for host, data in monitoring_data.items()]) + host_elements_not_found = {} + host_elements_not_found[host] = elements_not_found + + return host_elements_not_found + + with ThreadPoolExecutor() as executor: + futures = [] + for host, data in monitoring_data.items(): + futures.append(executor.submit(monitoring_event, host_manager, host, data, ignore_error)) + + results = {} + for future in as_completed(futures): + try: + result = future.result() + results.update(result) + except Exception as e: + print(f"An error occurred: {e}") + + return results def generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: list, timeout_list: list) -> dict: diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py 
index 0ce35c57bc..14191b02e0 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py @@ -47,31 +47,33 @@ def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[s assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" -def detect_alerts_by_agent(alerts, regex, current_datetime=None): +def get_alerts_by_agent(alerts, regex): + """ + Get specific alerts by agent. + + Args: + alerts (list): List of alerts. + regex (str): Regular expression to match the alerts. + + Returns: + dict: Dictionary containing the alerts by agent. + """ alerts_vuln_by_agent = {} + for alert in alerts: - valid_timestamp = True - if current_datetime: - dt = datetime.strptime(alert['_source']['timestamp'], "%Y-%m-%dT%H:%M:%S.%f%z") - - # Convert datetime to Unix timestamp (integer) - timestamp = int(dt.timestamp()) - if timestamp < current_datetime: - valid_timestamp = False - - if valid_timestamp: - if re.match(regex, alert['_source']['rule']['description']): - if 'agent' in alert['_source']: - agent = alert['_source']['agent']['name'] - if agent not in alerts_vuln_by_agent: - alerts_vuln_by_agent[agent] = [] - else: - alerts_vuln_by_agent[agent].append(alert) + if re.match(regex, alert['_source']['rule']['description']): + if 'agent' in alert['_source']: + agent = alert['_source']['agent']['name'] + if agent not in alerts_vuln_by_agent: + alerts_vuln_by_agent[agent] = [] + else: + alerts_vuln_by_agent[agent].append(alert) return alerts_vuln_by_agent -def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None): +def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict], + current_datetime: str = ''): """ Check vulnerability alerts in the indexer for a host. 
@@ -82,14 +84,15 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict Returns: list: List of vulnerability alerts. """ - regex_cve_affects = f"CVE.* affects .*" - regex_solved_vuln = f"The .* that affected .* was solved due to a package removal" - + regex_cve_affects = "CVE.* affects .*" + regex_solved_vuln = "The .* that affected .* was solved due to a package removal" indexer_alerts = get_indexer_values(host_manager, greater_than_timestamp=current_datetime)['hits']['hits'] + # Get CVE affects alerts for all agents - detected_vuln_alerts_by_agent = detect_alerts_by_agent(indexer_alerts, regex_cve_affects, current_datetime) - solved_alerts_by_agent = detect_alerts_by_agent(indexer_alerts, regex_solved_vuln, current_datetime) + detected_vuln_alerts_by_agent = get_alerts_by_agent(indexer_alerts, regex_cve_affects) + solved_alerts_by_agent = get_alerts_by_agent(indexer_alerts, regex_solved_vuln) + triggered_alerts = detected_vuln_alerts_by_agent expected_alerts_not_found = [] @@ -114,4 +117,4 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict if not found: expected_alerts_not_found.append(vulnerability) - assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" \ No newline at end of file + assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index e62b7bb81d..722c238725 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -35,10 +35,10 @@ def wait_until_vd_is_updated(host_manager: HostManager) -> None: for manager in host_manager.get_group_hosts('manager'): monitoring_data = generate_monitoring_logs_manager( - host_manager, manager, 'Message processed', 1000 + host_manager, manager, "INFO: Action for 
'vulnerability_feed_manager' finished", 1000 ) - monitoring_events_multihost(host_manager, monitoring_data) + monitoring_events_multihost(host_manager, monitoring_data) def wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: diff --git a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py index a746f4bd76..c7c919d0a9 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py @@ -1 +1 @@ -TIMEOUT_SYSCOLLECTOR_SCAN = 200 +TIMEOUT_SYSCOLLECTOR_SCAN = 360 diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 64a36aa327..a1719b0abd 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -4,6 +4,7 @@ import json import tempfile +import logging import xml.dom.minidom as minidom from typing import Union import testinfra @@ -15,6 +16,11 @@ from ansible.parsing.dataloader import DataLoader from ansible.vars.manager import VariableManager + +logger = logging.getLogger('testinfra') +logger.setLevel(logging.CRITICAL) + + class HostManager: """This class is an extensible remote host management interface. Within this we have multiple functions to modify the remote hosts depending on what our tests need. @@ -300,7 +306,7 @@ def get_api_token(self, host, user='wazuh', password='wazuh', auth_context=None, """Return an API token for the specified user. Args: - host (str): Hostname. + host (str): HostName in inventory. user (str, optional): API username. Default `wazuh` password (str, optional): API password. Default `wazuh` auth_context (dict, optional): Authorization context body. Default `None` @@ -512,6 +518,25 @@ def get_master_ip(self): return master_ip + def get_master(self): + """ + Retrieves the master node from the inventory. 
+ + Returns: + str: The master node, or None if not found. + """ + master_node = None + + for manager in self.get_group_hosts('manager'): + if 'type' in self.get_host_variables(manager) and \ + self.get_host_variables(manager)['type'] == 'master': + master_node = manager + break + if master_node is None: + raise ValueError('Master node not found in inventory') + + return master_node + def remove_package(self, host, package_name, system): """ Removes a package from the specified host. @@ -588,6 +613,47 @@ def control_environment(self, operation, group_list): for host in self.get_group_hosts(group): self.handle_wazuh_services(host, operation) + def get_agents_ids(self): + """ + Retrieves the ID of the agents from the API. + + Args: + agent_name (str): The name of the agent. + + Returns: + str: The ID of the agent. + """ + token = self.get_api_token(self.get_master()) + agents = self.make_api_call(self.get_master(), endpoint='/agents/', token=token)['json']['data'] + + agents_ids = [] + + for agent in agents['affected_items']: + if agent['id'] != '000': + agents_ids.append(agent['id']) + + return agents_ids + + def remove_agents(self): + """ + Removes all the agents from the API. + + Args: + host (str): The target host from which to remove the agent. 
+ + Example: + host_manager.remove_agent('my_host', 'my_agent_id') + """ + token = self.get_api_token(self.get_master()) + agents_ids = self.get_agents_ids() + result = self.make_api_call( + host=self.get_master(), + method='DELETE', + endpoint=f'/agents?agents_list={",".join(agents_ids)}&status=all&older_than=0s', + token=token, + ) + + def clean_environment(host_manager, target_files): """Clears a series of files on target hosts managed by a host manager diff --git a/tests/end_to_end/conftest.py b/tests/end_to_end/conftest.py index d9dd577694..7e6a91751e 100644 --- a/tests/end_to_end/conftest.py +++ b/tests/end_to_end/conftest.py @@ -346,3 +346,5 @@ def pytest_addoption(parser): type=str, help='Ansible roles path.', ) + + diff --git a/tests/end_to_end/pytest.ini b/tests/end_to_end/pytest.ini index e561b2bf7f..1757f9625e 100644 --- a/tests/end_to_end/pytest.ini +++ b/tests/end_to_end/pytest.ini @@ -1,5 +1,10 @@ [pytest] log_cli = 1 + log_cli_level = ERROR log_cli_format = %(asctime)s %(message)s (%(filename)s:%(lineno)s) log_cli_date_format=%Y-%m-%d %H:%M:%S + +log_file_level = ERROR +log_file_format = %(asctime)s %(message)s (%(filename)s:%(lineno)s) +log_file_date_format = %Y-%m-%d %H:%M:%S diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml index 32edc6424e..9be7ff1abb 100644 --- a/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml @@ -21,4 +21,4 @@ - disabled: value: 'no' - interval: - value: '1m' + value: '6m' diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index c0e6b3b50b..a1afbca40f 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -20,11 +20,21 @@ def test_example(host_manager): ``` """ import 
pytest +import json +import datetime +import os +import shutil +import uuid +from py.xml import html +from numpydoc.docscrape import FunctionDoc from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations +catalog = list() +results = dict() + @pytest.fixture(scope='session') def host_manager(request): """Fixture for creating a HostManager instance. @@ -60,3 +70,177 @@ def setup(preconditions, teardown, host_manager): if teardown: print("Configuring teardonw") launch_parallel_operations(teardown, host_manager) + + +@pytest.fixture(scope='session', autouse=True) +def handle_logs(): + + logs_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'logs') + os.makedirs(logs_dir, exist_ok=True) + + yield + + shutil.rmtree(logs_dir, ignore_errors=True) + + +# Configure logging +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_runtest_setup(item): + item_name = item._request.node.name + logs_dir = os.path.join(os.curdir, 'logs') + logs_test_dir = os.path.join(logs_dir, item_name) + + os.makedirs(logs_test_dir, exist_ok=True) + + item_log_path = os.path.join('logs', item_name) + + config = item.config + logging_plugin = config.pluginmanager.get_plugin("logging-plugin") + + filename = os.path.join(item_log_path, item._request.node.name + ".log") + + logging_plugin.set_log_path(str(filename)) + + yield + + +def pytest_html_results_table_header(cells): + print("Using filename as logging path") + cells.insert(4, html.th('Tier', class_='sortable tier', col='tier')) + cells.insert(3, html.th('Markers')) + cells.insert(2, html.th('Description')) + cells.insert(1, html.th('Time', class_='sortable time', col='time')) + + +def pytest_html_results_table_row(report, cells): + try: + cells.insert(4, html.td(report.tier)) + cells.insert(3, html.td(report.markers)) + cells.insert(2, html.td(report.description)) + cells.insert(1, html.td(datetime.utcnow(), class_='col-time')) + except 
AttributeError: + pass + + +# HARDCODE: pytest-html generates too long file names. This temp fix is to reduce the name of +# the assets +def create_asset( + self, content, extra_index, test_index, file_extension, mode="w" +): + asset_file_name = "{}.{}".format( + str(uuid.uuid4()), + file_extension + ) + asset_path = os.path.join( + os.path.dirname(self.logfile), "assets", asset_file_name + ) + + if not os.path.exists(os.path.dirname(asset_path)): + os.makedirs(os.path.dirname(asset_path)) + + relative_path = os.path.join("assets", asset_file_name) + + kwargs = {"encoding": "utf-8"} if "b" not in mode else {} + + with open(asset_path, mode, **kwargs) as f: + f.write(content) + return relative_path + + +@pytest.hookimpl(hookwrapper=True) +def pytest_runtest_makereport(item, call): + pytest_html = item.config.pluginmanager.getplugin('html') + outcome = yield + report = outcome.get_result() + documentation = FunctionDoc(item.function) + + # Add description, markers and tier to the report + report.description = '. '.join(documentation["Summary"]) + report.tier = ', '.join(str(mark.kwargs['level']) for mark in item.iter_markers(name="tier")) + report.markers = ', '.join(mark.name for mark in item.iter_markers() if + mark.name != 'tier' and mark.name != 'parametrize') + + if report.location[0] not in results: + results[report.location[0]] = {'passed': 0, 'failed': 0, 'skipped': 0, 'xfailed': 0, 'error': 0} + + extra = getattr(report, 'extra', []) + if report.when == 'call': + # Apply hack to fix length filename problem + pytest_html.HTMLReport.TestResult.create_asset = create_asset + + # Add extended information from docstring inside 'Result' section + extra.append(pytest_html.extras.html('

Test function details

')) + for section in ('Extended Summary', 'Parameters'): + extra.append(pytest_html.extras.html(f'

{section}

')) + for line in documentation[section]: + extra.append(pytest_html.extras.html(f'
{line}
')) + arguments = dict() + + # Add arguments of each text as a json file + for key, value in item.funcargs.items(): + if isinstance(value, set): + arguments[key] = list(value) + try: + json.dumps(value) + arguments[key] = value + except (TypeError, OverflowError): + arguments[key] = str(value) + extra.append(pytest_html.extras.json(arguments, name="Test arguments")) + + # Extra files to be added in 'Links' section + logs_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'logs', item._request.node.name) + + files = [os.path.join(logs_path, f) for f in os.listdir(logs_path) if + os.path.isfile(os.path.join(logs_path, f))] + + import pdb; pdb.set_trace() + + for filepath in files: + if os.path.isfile(filepath): + with open(filepath, mode='r', errors='replace') as f: + content = f.read() + extra.append(pytest_html.extras.text(content, name=os.path.split(filepath)[-1])) + + if not report.passed and not report.skipped: + report.extra = extra + + if report.longrepr is not None and report.longreprtext.split()[-1] == 'XFailed': + results[report.location[0]]['xfailed'] += 1 + else: + results[report.location[0]][report.outcome] += 1 + + elif report.outcome == 'failed': + results[report.location[0]]['error'] += 1 + + +class SummaryTable(html): + class table(html.table): + style = html.Style(border='1px solid #e6e6e6', margin='16px 0px', color='#999', font_size='12px') + + class td(html.td): + style = html.Style(padding='5px', border='1px solid #E6E6E6', text_align='left') + + class th(html.th): + style = html.Style(padding='5px', border='1px solid #E6E6E6', text_align='left', font_weight='bold') + + +def pytest_html_results_summary(prefix, summary, postfix): + postfix.extend([SummaryTable.table( + html.thead( + html.tr([ + SummaryTable.th("Tests"), + SummaryTable.th("Failed"), + SummaryTable.th("Success"), + SummaryTable.th("XFail"), + SummaryTable.th("Error")] + ), + ), + [html.tbody( + html.tr([ + SummaryTable.td(k), + SummaryTable.td(v['failed']), + 
SummaryTable.td(v['passed']), + SummaryTable.td(v['xfailed']), + SummaryTable.td(v['error']), + ]) + ) for k, v in results.items()])]) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index e2a8458085..d19646aacd 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -44,22 +44,22 @@ import pytest import logging import yaml +import json import time -import re import ast -from typing import Generator, Dict, List +import datetime +from typing import Generator from wazuh_testing.end_to_end.configuration import backup_configurations, restore_configuration, configure_environment from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files -from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities -from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated, wait_until_vuln_scan_agents_finished +from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events_multihost from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations -from wazuh_testing.end_to_end.vulnerability_detector import detect_alerts_by_agent +from wazuh_testing.end_to_end.vulnerability_detector import get_alerts_by_agent from wazuh_testing.modules.syscollector import TIMEOUT_SYSCOLLECTOR_SCAN @@ -73,13 +73,28 @@ 'manager': os.path.join(configurations_dir, 'manager.yaml'), 'agent': os.path.join(configurations_dir, 'agent.yaml') } 
+vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") -def list_of_dicts_to_set_of_tuples(lst): + +def collect_evidences(host_manager, test_name, evidences_to_collect): """ - Convert a list of dictionaries to a set of frozensets (tuples). - Each frozenset represents the items of a dictionary. + Collect evidences for the test """ - return set(frozenset(d.items()) for d in lst) + evidences_directory = os.path.join('..', 'logs', test_name) + + print(evidences_directory) + + for evidence, content in evidences_to_collect.items(): + evidence_file = os.path.join(evidences_directory, evidence + ".log") + with open(evidence_file, 'w') as evidence_file: + if content.__class__ == dict: + try: + json.dump(content, evidence_file, indent=4) + except TypeError: + logger.critical(f"Error dumping {evidence} to file") + pass + else: + evidence_file.write(str(content)) def load_vulnerability_detector_configurations(host_manager): @@ -134,14 +149,22 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: logger.error("Configuring environment") configure_environment(host_manager, load_vulnerability_detector_configurations(host_manager)) + # Truncate alerts and logs of managers and agents + logger.error("Truncate managers and agents logs") + truncate_remote_host_group_files(host_manager, 'all', 'logs') + # Restart managers and stop agents logger.error("Stopping agents") host_manager.control_environment('stop', ['agent']) logger.error("Restarting managers") host_manager.control_environment('restart', ['manager']) + utc_now_timestamp = datetime.datetime.utcnow() + + # Format the date and time as per the given format + test_timestamp = utc_now_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ") + # Wait until VD is updated - # To do: Change VD Feeds updated logger.error("Wait until Vulnerability Detector has update all the feeds") wait_until_vd_is_updated(host_manager) @@ -149,11 +172,17 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: 
logger.error("Truncate managers and agents logs") truncate_remote_host_group_files(host_manager, 'all', 'logs') + # Re-Register agents: https://github.com/wazuh/wazuh/issues/21185 + logger.error("Removing agents") + host_manager.remove_agents() + + # Wait until agents are registered again + time.sleep(15) + # Start agents - logger.error("Starting agents") host_manager.control_environment('start', ['agent']) - yield + yield test_timestamp # Truncate alerts and logs of managers and agents logger.error("Truncate managers and agents logs") @@ -172,11 +201,12 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) alerts_vulnerabilities = [] indices_vulnerabilities = [] - for vulnerability in vulnerabilities_alerts.values(): - alert_agent = vulnerability['_source']['agent']['name'] - alert_cve = vulnerability['data']['vulnerability']['cve'] - alert_package_version = vulnerability['data']['vulnerability']['package']['version'] - alert_package_name = vulnerability['data']['vulnerability']['package']['name'] + print(vulnerabilities_alerts.__class__) + for alert in list(vulnerabilities_alerts.values())[0]: + alert_agent = alert['_source']['agent']['name'] + alert_cve = alert['_source']['data']['vulnerability']['cve'] + alert_package_version = alert['_source']['data']['vulnerability']['package']['version'] + alert_package_name = alert['_source']['data']['vulnerability']['package']['name'] alerts_vulnerabilities.append({ 'cve': alert_cve, 'agent': alert_agent, @@ -184,12 +214,12 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) 'package_version': alert_package_version }) + for vulnerabilities_state in vulnerabilities_states['hits']['hits']: + state_agent = vulnerabilities_state['_source']['agent']['name'] + state_cve = vulnerabilities_state['_source']['vulnerability']['id'] + state_package_name = vulnerabilities_state['_source']['package']['name'] + state_package_version = 
vulnerabilities_state['_source']['package']['version'] - for vulnerabilities_state in vulnerabilities_states: - state_agent = vulnerabilities_state['agent']['name'] - state_cve = vulnerabilities_state['vulnerability']['enumeration'] - state_package_name = vulnerabilities_state['package']['name'] - state_package_version = vulnerabilities_state['agent']['version'] indices_vulnerabilities.append({ 'cve': state_cve, 'agent': state_agent, @@ -197,16 +227,33 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) 'package_version': state_package_version }) - indices_vulnerabilities_set = list_of_dicts_to_set_of_tuples(indices_vulnerabilities) - alerts_vulnerabilities_set = list_of_dicts_to_set_of_tuples(alerts_vulnerabilities) + if len(alerts_vulnerabilities) != len(indices_vulnerabilities): + logger.critical("The number of alerts is not the same as the number of states") + logger.critical(f"Alerts: {len(alerts_vulnerabilities)}") + logger.critical(f"States: {len(indices_vulnerabilities)}") - # Assert that the sets are equal - assert indices_vulnerabilities_set == alerts_vulnerabilities_set, "Discrepancies beetween alerts and states indices" + alerts_not_in_state = [] + states_not_in_alerts = [] + + # Check that all alerts are in the index + for alert in alerts_vulnerabilities: + if alert not in indices_vulnerabilities: + alerts_not_in_state.append(alert) + + # Check that all index states are in the alerts + for state in indices_vulnerabilities: + if state not in alerts_vulnerabilities: + states_not_in_alerts.append(state) + + return { + 'alerts_not_in_state': alerts_not_in_state, + 'states_not_in_alerts': states_not_in_alerts + } @pytest.mark.dependency() @pytest.mark.filterwarnings('ignore::urllib3.exceptions.InsecureRequestWarning') -def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): +def test_syscollector_initial_scans(request, host_manager, setup_vulnerability_tests): """ description: Validates the initiation of 
Syscollector scans across all agents in the environment. @@ -237,7 +284,29 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): - syscollector - vulnerability_detector """ - TIMEOUT_AGENTS_VULNERABILITY_SCAN = 300 + + tests_results = { + 'checks': { + 'syscollector_first_scan': True, + 'all_agents_scanned_first_scan': True, + 'all_agents_vulnerabilities_detected': True, + 'index_state_consistent': True, + 'syscollector_second_scan': True, + 'vulnerabilities_equal_between_scans': True, + 'vulnerabilities_equal_between_scans_indexer': True + }, + 'evidences': { + 'agents_not_scanned_first_scan': [], + 'agents_syscollector_scan_not_started': [], + 'agents_syscollector_second_scan_not_started': [], + 'agents_not_detected_vulnerabilities': [], + 'index_state_inconsistencies': {}, + 'vulnerabilities_not_equal_between_scans': [], + 'vulnerabilities_not_equal_between_scans_indexer': [], + 'agents_different_between_scans': [] + } + } + TIMEOUT_AGENTS_VULNERABILITY_SCAN = 200 # Monitor for the first Syscollector scan in all the agents logger.critical("Monitoring Syscollector First Scan") @@ -246,7 +315,18 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): get_event_regex({'event': 'syscollector_scan_end'})], [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) - monitoring_events_multihost(host_manager, monitoring_data) + elements_not_found = monitoring_events_multihost(host_manager, monitoring_data) + + if any(elements_not_found.values()): + tests_results['checks']['syscollector_first_scan'] = False + + for element in elements_not_found: + if elements_not_found[element]: + tests_results['evidences']['agents_syscollector_scan_not_started'].append(element.keys()) + + logging.critical(f"Syscollector scan not started in the following agents:" + f"{tests_results['evidences']['agents_syscollector_scan_not_started']}." 
+ 'Continuing with the test') # Truncate agents logs to detect second scan logger.critical("Truncating agent's logs") @@ -254,28 +334,48 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): # Wait until all agents has been scanned logger.critical("Waiting until agent's VD scan is over") - - # ToDo: Replace with relevan event. For now timeout + # Replace with relevan event. For now timeout time.sleep(TIMEOUT_AGENTS_VULNERABILITY_SCAN) - # wait_until_vuln_scan_agents_finished(host_manager) logger.critical("Check agent's vulnerabilities") + alerts_first_scan = get_indexer_values(host_manager, + greater_than_timestamp=setup_vulnerability_tests)['hits']['hits'] + vuln_alerts_by_agent_first_scan = get_alerts_by_agent(alerts_first_scan, 'CVE.*? affects.*"?') + - # Check that each agent has generated alerts - agents_vuln_first_scan = {} - indexer_alerts_first_scan = get_indexer_values(host_manager)['hits']['hits'] + logger.critical(f"List of alerts first scan: {vuln_alerts_by_agent_first_scan}") - vuln_alerts_by_agent_first_scan = detect_alerts_by_agent(indexer_alerts_first_scan, 'CVE. affects.*') # Check that it has been triggered vulnerability detector alerts + logger.critical("Checking that all agents has been scanned") for agent in host_manager.get_group_hosts('agent'): - assert agent not in vuln_alerts_by_agent_first_scan, f"No vulnerabilities were detected for Agent {agent}" - assert len(vuln_alerts_by_agent_first_scan[agent]) != 0 - - # Check index state is not empty - index_state_first_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities') - - check_vuln_state_consistency(index_state_first_scan, vuln_alerts_by_agent_first_scan) + if agent not in vuln_alerts_by_agent_first_scan.keys(): + logger.critical(f"Agent {agent} has not been scanned. 
Continuing with remaining agents") + tests_results['checks']['all_agents_vulnerabilities_detected'] = False + tests_results['evidences']['agents_not_scanned_first_scan'].append(agent) + + if len(vuln_alerts_by_agent_first_scan[agent]) == 0: + logger.critical(f"Agent {agent} has not generated vulnerabilities. Continuing with remaining agents") + tests_results['checks']['all_agents_vulnerabilities_detected'] = False + tests_results['evidences']['agents_not_detected_vulnerabilities'].append(agent) + + # Check vulnerabilities in the index + logger.critical("Checking vulnerabilities in the index") + index_state_first_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', + greater_than_timestamp=setup_vulnerability_tests) + + logger.critical(f"List of indexer indices first scan: {vuln_alerts_by_agent_first_scan}") + + # Check that the index is consistent with the alerts + logging.critical("Checking index state consistency") + tests_results['evidences']['index_state_inconsistencies'] = \ + check_vuln_state_consistency(vuln_alerts_by_agent_first_scan, + index_state_first_scan) + + if tests_results['evidences']['index_state_inconsistencies']['alerts_not_in_state'] or \ + tests_results['evidences']['index_state_inconsistencies']['states_not_in_alerts']: + logger.critical("Index state is not consistent with the alerts") + tests_results['checks']['index_state_consistent'] = False # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly logger.critical("Truncating manager logs") @@ -288,28 +388,132 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): get_event_regex({'event': 'syscollector_scan_end'})], [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) - monitoring_events_multihost(host_manager, monitoring_data) + elements_not_found = monitoring_events_multihost(host_manager, monitoring_data) + + logger.critical("Checking that all agents has been scanned") + if 
any(elements_not_found.values()): + tests_results['checks']['syscollector_second_scan'] = False + + for element in elements_not_found: + if elements_not_found[element]: + tests_results['evidences']['agents_syscollector_second_scan_not_started'].append(element.keys()) + + logging.critical(f"Syscollector scan not started in the following agents:" + f"{tests_results['evidences']['agents_syscollector_second_scan_not_started']}." + 'Continuing with the test') + + logger.critical("Waiting until agent's VD scan is over") + time.sleep(60) + + logger.critical("Checking vulnerabilities in the second scan") + alerts_second_scan = get_indexer_values(host_manager, + greater_than_timestamp=setup_vulnerability_tests)['hits']['hits'] + + vuln_alerts_by_agent_second_scan = get_alerts_by_agent(alerts_second_scan, 'CVE.*? affects.*"?') + + alert_present_in_first_scan_not_in_second_scan = [] + alert_present_in_second_scan_not_in_second_scan = [] + + if len(vuln_alerts_by_agent_second_scan) != len(vuln_alerts_by_agent_first_scan): + tests_results['checks']['vulnerabilities_equal_between_scans'] = False + logger.critical("The number of vulnerabilities is not the same between scans") + logger.critical(f"First scan: {len(vuln_alerts_by_agent_first_scan)}") + logger.critical(f"Second scan: {len(vuln_alerts_by_agent_second_scan)}") + + logger.critical("Checking that all agents has been scanned") + # Check if the number of agents for each scan is the same + if list(vuln_alerts_by_agent_first_scan.keys()) != list(vuln_alerts_by_agent_second_scan.keys()): + tests_results['checks']['vulnerabilities_equal_between_scans'] = False + logging.critical(f"Agents with vulnerabilities changed between scans: " + f"First scan: {list(vuln_alerts_by_agent_first_scan.keys())}" + f"Second scan: {list(vuln_alerts_by_agent_second_scan.keys())}") + tests_results['evidences']['agents_different_between_scans'] = \ + list(set(list(vuln_alerts_by_agent_first_scan.keys())) ^ 
set(list(vuln_alerts_by_agent_second_scan.keys()))) + + logger.critical("Checking that all agents has been scanned") + # Check if the number of vulnerabilities for each agent is the same + for agent in vuln_alerts_by_agent_second_scan.keys(): + for alert in list(vuln_alerts_by_agent_second_scan[agent][0]): + alert_present_in_second_scan_not_in_second_scan.append(alert) + + for alert in list(vuln_alerts_by_agent_first_scan[agent][0]): + if alert in alert_present_in_first_scan_not_in_second_scan: + alert_present_in_first_scan_not_in_second_scan.remove(alert) + + logger.critical("Checking that all agents has been scanned") + if alert_present_in_first_scan_not_in_second_scan or alert_present_in_second_scan_not_in_second_scan: + tests_results['checks']['vulnerabilities_equal_between_scans'] = False + tests_results['evidences']['vulnerabilities_not_equal_between_scans'] = { + 'alert_present_in_first_scan_not_in_second_scan': alert_present_in_first_scan_not_in_second_scan, + 'alert_present_in_second_scan_not_in_second_scan': alert_present_in_second_scan_not_in_second_scan + } + + logger.critical("Checking vulnerabilities in the second scan") + index_state_second_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', + greater_than_timestamp=setup_vulnerability_tests) + logger.critical(f"List of indices second scan: {vuln_alerts_by_agent_second_scan}") + + if index_state_second_scan != index_state_first_scan: + tests_results['checks']['vulnerabilities_equal_between_scans_indexer'] = False + tests_results['evidences']['vulnerabilities_not_equal_between_scans_indexer'] = { + 'index_state_first_scan': index_state_first_scan, + 'index_state_second_scan': index_state_second_scan + } + + test_result = all(tests_results['checks'].values()) + + + if not test_result: + logger.critical("Test failed. 
Test results:") + if not tests_results['checks']['syscollector_first_scan']: + logger.critical("Syscollector scan not started in the following agents:" + f"{tests_results['evidences']['agents_syscollector_scan_not_started']}") + + if not tests_results['checks']['all_agents_scanned_first_scan']: + logger.critical("Not all agents were scanned in the first scan. Missing agents:" + f"{tests_results['evidences']['agents_not_scanned_first_scan']}") + + if not tests_results['checks']['all_agents_vulnerabilities_detected']: + logger.critical("Not all agents generated vulnerabilities. Missing agents:" + f"{tests_results['evidences']['agents_not_detected_vulnerabilities']}") + + if not tests_results['checks']['index_state_consistent']: + logger.critical("Index state is not consistent with the alerts. Inconsistencies:" + f"{tests_results['evidences']['index_state_inconsistencies']}") - # To Do: Replace with relevan event. For now timeout - time.sleep(300) + if not tests_results['checks']['syscollector_second_scan']: + logger.critical("Syscollector scan not started in the following agents:" + f"{tests_results['evidences']['agents_syscollector_second_scan_not_started']}") - agents_vuln_second_scan = {} - indexer_alerts_second_scan = get_indexer_values(host_manager)['hits']['hits'] - vuln_alerts_by_agent_second_scan = detect_alerts_by_agent(indexer_alerts_second_scan, 'CVE. affects.*') + if not tests_results['checks']['vulnerabilities_equal_between_scans']: + logger.critical("The number of vulnerabilities is not the same between scans. 
Inconsistencies:" + f"{tests_results['evidences']['vulnerabilities_not_equal_between_scans']}") - assert vuln_alerts_by_agent_first_scan == vuln_alerts_by_agent_second_scan, \ - "Differences between first and second syscollector" \ - f"First: Scan: {agents_vuln_first_scan}" \ - f"Second Scan: {agents_vuln_second_scan}" + if not tests_results['checks']['vulnerabilities_equal_between_scans_indexer']: + logger.critical("The number of vulnerabilities is not the same between scans. Inconsistencies:" + f"{tests_results['evidences']['vulnerabilities_not_equal_between_scans_indexer']}") - # Check index state - index_state_second_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities') - assert index_state_second_scan == index_state_first_scan, f"Index state value changed between scans: " \ - f"First scan: {index_state_first_scan}" \ - f"Second scan: {index_state_second_scan}" + logger.critical("Gathering evidences") + + evidences_to_collect = { + "alerts_first_scan": vuln_alerts_by_agent_first_scan, + "alerts_second_scan": vuln_alerts_by_agent_second_scan, + "index_state_first_scan": index_state_first_scan, + "index_state_second_scan": index_state_second_scan, + "index_alerts_inconsistences": tests_results['evidences']['index_state_inconsistencies'], + "differences_alerts_between_scans": + tests_results['evidences']['vulnerabilities_not_equal_between_scans'], + "differences_index_between_scans": + tests_results['evidences']['vulnerabilities_not_equal_between_scans_indexer'] + } + + collect_evidences(host_manager, request.node.name, evidences_to_collect) + + pytest.fail("Test failed. 
Check logs for more information") # ------------------------- + cases = {} with open(os.path.join(current_dir, os.path.join('cases', 'test_vulnerability.yaml')), 'r') as cases_file: @@ -324,7 +528,8 @@ def test_syscollector_initial_scans(host_manager, setup_vulnerability_tests): for case in cases ] -dependencies = [None if 'depends' not in case else pytest.mark.depends(name=case['id'], depends=case['depends']) for case in cases] +dependencies = [None if 'depends' not in case else pytest.mark.depends(name=case['id'], + depends=case['depends']) for case in cases] list_ids = [case['id'] for case in cases] From 574ce1ebc6cfeab7b7b6f83566c2f30ea71ad3ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 16 Jan 2024 18:12:04 +0000 Subject: [PATCH 089/174] feat: include delete package operations in AG Syscollector messages --- .../wazuh_testing/data/syscollector.py | 2 +- .../data/syscollector_parsed_packages.json | 9 ++ .../wazuh_testing/tools/agent_simulator.py | 95 ++++++++++++++++--- 3 files changed, 91 insertions(+), 15 deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json diff --git a/deps/wazuh_testing/wazuh_testing/data/syscollector.py b/deps/wazuh_testing/wazuh_testing/data/syscollector.py index f98d5c36b6..3091d849d8 100644 --- a/deps/wazuh_testing/wazuh_testing/data/syscollector.py +++ b/deps/wazuh_testing/wazuh_testing/data/syscollector.py @@ -59,7 +59,7 @@ "format": "", "groups": "editors", "install_time": "", - "item_id": "", + "item_id": "", "location": " ", "multiarch": "null", "name": "", diff --git a/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json b/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json new file mode 100644 index 0000000000..a41b357172 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json @@ -0,0 +1,9 @@ +[ + { + "vendor": "bsdi", + "product": "bsd_os", + "version": "3.1", + 
"checksum": "66e8140425c4e12c2d35e8267d31deb2853f1f5e", + "item_id": "6bbdd1d1b4337b9cb73b2e6520528b013a9aab0c" + } +] diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 35874531fa..6c96c12830 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -775,15 +775,18 @@ def __init__(self, agent_name, event_types_list, old_format, batch_size, syscoll } self.syscollector_packages_vuln_content = syscollector_packages_vuln_content - self.default_package_data = { - '': 'A low-level cryptographic library', - '': 'x86_64', - '': 'rpm', - '': 'nettle', - '': 'vim', - '': 'Ubuntu Developers ', - '': '2.7.1-9.el7_9' - } + self.packages = [ + { + 'installed': False, + 'description': 'A low-level cryptographic library', + 'architecture': 'x86_64', + 'format': 'rpm', + 'name': 'nettle', + 'source': 'vim', + 'vendor': 'Ubuntu Developers ', + 'version': '2.7.1-9.el7_9' + } + ] self.old_format = old_format self.agent_name = agent_name @@ -791,6 +794,64 @@ def __init__(self, agent_name, event_types_list, old_format, batch_size, syscoll self.syscollector_tag = 'syscollector' self.syscollector_mq = 'd' self.current_id = 1 + self.default_packages_vuln_content = os.path.join(_data_path, 'syscollector_parsed_packages.json') + + self.package_index = 0 + + if self.syscollector_packages_vuln_content: + self.packages = self.init_package_data(self.syscollector_packages_vuln_content) + else: + self.packages = self.init_package_data(self.default_packages_vuln_content) + + def parse_package_template(self, message, package_data): + template_package_fields = { + '': package_data['description'], + '': package_data['architecture'], + '': package_data['format'], + '': package_data['product'], + '': package_data['source'], + '': package_data['vendor'], + '': package_data['version'], + '': package_data['item_id'] + } + + for package_key, 
package_value in template_package_fields.items(): + message = message.replace(package_key, package_value) + + return message + + def get_package_data(self): + operation = 'INSERTED' if not self.packages[self.package_index]['installed'] else 'DELETED' + print(f"Current package operation: {operation}") + + package_data = self.packages[self.package_index] + + self.package_index = (self.package_index + 1) % len(self.packages) + + return package_data, operation + + def init_package_data(self, packages_file): + """Get package data from a json file. + Returns: + dict: Package data. + """ + with open(os.path.join(_data_path, packages_file), 'r') as fp: + package_data = json.load(fp) + + for package in package_data: + package['installed'] = False + if 'description' not in package: + package['description'] = '' + if 'architecture' not in package: + package['architecture'] = '' + if 'format' not in package: + package['format'] = '' + if 'source' not in package: + package['source'] = '' + if 'item_id' not in package: + package['item_id'] = get_random_string(10) + + return package_data def get_event_template_legacy(self, message_type): """Get syscollector legacy message of the specified type. 
@@ -831,6 +892,7 @@ def get_event_template(self, message_type): message_operation = operation message_data = {} + package_data = {} if message_type == 'network': message_data = syscollector.SYSCOLLECTOR_NETWORK_IFACE_DELTA_EVENT_TEMPLATE @@ -847,12 +909,22 @@ def get_event_template(self, message_type): elif message_type == 'hotfix': message_data = syscollector.SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE + if message_type == 'packages': + print("PACKAGEEEEEEEEEEEES") + package_data, operation = self.get_package_data() + message_operation = operation + message = '{"type": "%s", "data": %s, "operation": "%s"}' % ( message_event_type, re.sub(r'\s', '', json.dumps(message_data)), message_operation ) + if message_type == 'packages': + message = self.parse_package_template(message, package_data) + print(f"Current package index: {self.package_index}") + self.packages[self.package_index]['installed'] = not self.packages[self.package_index]['installed'] + return message def format_event_template(self, template, message_type=None): @@ -869,11 +941,6 @@ def format_event_template(self, template, message_type=None): for variable, value in generics_fields_to_replace: message = message.replace(variable, value) - if message_type == 'packages': - if not self.syscollector_packages_vuln_content: - for package_key, package_value in self.default_package_data.items(): - message = message.replace(package_key, package_value) - final_mesage = f"{self.syscollector_mq}:{self.syscollector_tag}:{message}" return final_mesage From fb82a3b3e5ab4685c7947b1f314b7a736a963d66 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 16 Jan 2024 18:22:55 +0000 Subject: [PATCH 090/174] fix: remove debug messages --- .../wazuh_testing/data/syscollector_parsed_packages.json | 2 -- deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py | 1 - 2 files changed, 3 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json 
b/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json index a41b357172..f744a353e9 100644 --- a/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json +++ b/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json @@ -3,7 +3,5 @@ "vendor": "bsdi", "product": "bsd_os", "version": "3.1", - "checksum": "66e8140425c4e12c2d35e8267d31deb2853f1f5e", - "item_id": "6bbdd1d1b4337b9cb73b2e6520528b013a9aab0c" } ] diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 6c96c12830..5d6bd1699c 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -910,7 +910,6 @@ def get_event_template(self, message_type): message_data = syscollector.SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE if message_type == 'packages': - print("PACKAGEEEEEEEEEEEES") package_data, operation = self.get_package_data() message_operation = operation From 53dc3aaa59fc62826debd291a6b3f650a6577c96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 17 Jan 2024 10:02:12 +0000 Subject: [PATCH 091/174] feat: include content snapshot parsing script --- .../scripts/parse_packages_content.py | 76 +++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 deps/wazuh_testing/wazuh_testing/scripts/parse_packages_content.py diff --git a/deps/wazuh_testing/wazuh_testing/scripts/parse_packages_content.py b/deps/wazuh_testing/wazuh_testing/scripts/parse_packages_content.py new file mode 100644 index 0000000000..d051a96ab3 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/scripts/parse_packages_content.py @@ -0,0 +1,76 @@ +import argparse +import json +import logging + + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger("parse_packages_content.py") + +config = None + + +def parse_packages_content(output_file, packages_file, n_packages): + 
list_packages = [] + + with open(packages_file) as f: + for line in f: + if len(list_packages) >= n_packages: + break + + if line.strip(): + config = json.loads(line) + + if 'payload' in config and 'containers' in config['payload'] \ + and 'cna' in config['payload']['containers']: + if 'affected' in config['payload']['containers']['cna']: + for affected in config['payload']['containers']['cna']['affected']: + vendor = affected['vendor'] + product = affected['product'] + + for affected_version in affected['versions']: + status, version = affected_version['status'], affected_version['version'] + + if status == 'affected': + list_packages.append({ + 'vendor': vendor, + 'product': product, + 'version': version + }) + else: + logger.debug("No affected found in package: %s", config) + else: + logger.warning("No payload found for package: %s", config['id']) + logger.debug("Package: %s", config) + + with open(output_file, 'w') as f: + json.dump(list_packages, f, indent=4) + + +def main(): + arg_parser = argparse.ArgumentParser() + + arg_parser.add_argument('-p', '--packages', metavar='', type=str, required=True, + help='Packages file', dest='packages_file') + + arg_parser.add_argument('-n', '--n_packages', metavar='', type=int, required=True, + help='Number of packages to parse', dest='n_packages') + + arg_parser.add_argument('-d', '--debug', action='store_true', help='Enable debug mode', dest='debug') + + arg_parser.add_argument('-o', '--output', metavar='', type=str, required=True, + help='Output file', dest='output_file') + + args = arg_parser.parse_args() + + if args.debug: + logger.setLevel(logging.DEBUG) + else: + logger.setLevel(logging.INFO) + + logging.info("Parsing packages content...") + parse_packages_content(args.output_file, args.packages_file, args.n_packages) + logging.info("Packages parsed successfully") + + +if __name__ == '__main__': + main() From 542c492436a2e71f465729ad8cefdd72bf911df9 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 17 Jan 2024 10:03:01 +0000 Subject: [PATCH 092/174] feat: increase default packages to 10 --- .../data/syscollector_parsed_packages.json | 112 +++++++++++++++++- 1 file changed, 111 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json b/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json index f744a353e9..edb0840c08 100644 --- a/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json +++ b/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json @@ -2,6 +2,116 @@ { "vendor": "bsdi", "product": "bsd_os", - "version": "3.1", + "version": "3.1" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "1.0" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "1.1" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "1.1.5.1" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "1.2" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.0" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.0.1" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.0.5" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.1.5" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.1.6" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.1.6.1" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.1.7" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.1.7.1" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.2" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.2.2" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.2.3" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.2.4" + }, + { + "vendor": "freebsd", + "product": 
"freebsd", + "version": "2.2.5" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.2.6" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "2.2.8" + }, + { + "vendor": "freebsd", + "product": "freebsd", + "version": "3.0" + }, + { + "vendor": "openbsd", + "product": "openbsd", + "version": "2.3" + }, + { + "vendor": "openbsd", + "product": "openbsd", + "version": "2.4" } ] From be408be25de8117f08d5f1ecf840480d9d271000 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 17 Jan 2024 10:10:53 +0000 Subject: [PATCH 093/174] style: remove nonused default package list --- .../scripts/parse_packages_content.py | 8 ++-- .../wazuh_testing/tools/agent_simulator.py | 37 +++++++++---------- 2 files changed, 22 insertions(+), 23 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/scripts/parse_packages_content.py b/deps/wazuh_testing/wazuh_testing/scripts/parse_packages_content.py index d051a96ab3..907d012fa4 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/parse_packages_content.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/parse_packages_content.py @@ -6,11 +6,10 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger("parse_packages_content.py") -config = None - def parse_packages_content(output_file, packages_file, n_packages): list_packages = [] + config = None with open(packages_file) as f: for line in f: @@ -55,11 +54,12 @@ def main(): arg_parser.add_argument('-n', '--n_packages', metavar='', type=int, required=True, help='Number of packages to parse', dest='n_packages') - arg_parser.add_argument('-d', '--debug', action='store_true', help='Enable debug mode', dest='debug') - arg_parser.add_argument('-o', '--output', metavar='', type=str, required=True, help='Output file', dest='output_file') + arg_parser.add_argument('-d', '--debug', action='store_true', help='Enable debug mode', dest='debug') + + args = arg_parser.parse_args() if args.debug: diff --git 
a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 5d6bd1699c..1b2dfc84c3 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -775,18 +775,7 @@ def __init__(self, agent_name, event_types_list, old_format, batch_size, syscoll } self.syscollector_packages_vuln_content = syscollector_packages_vuln_content - self.packages = [ - { - 'installed': False, - 'description': 'A low-level cryptographic library', - 'architecture': 'x86_64', - 'format': 'rpm', - 'name': 'nettle', - 'source': 'vim', - 'vendor': 'Ubuntu Developers ', - 'version': '2.7.1-9.el7_9' - } - ] + self.packages = [] self.old_format = old_format self.agent_name = agent_name @@ -794,16 +783,23 @@ def __init__(self, agent_name, event_types_list, old_format, batch_size, syscoll self.syscollector_tag = 'syscollector' self.syscollector_mq = 'd' self.current_id = 1 - self.default_packages_vuln_content = os.path.join(_data_path, 'syscollector_parsed_packages.json') + self.default_packages_vuln_content = os.path.join(_data_path, 'syscollector_parsed_packages.json') self.package_index = 0 if self.syscollector_packages_vuln_content: - self.packages = self.init_package_data(self.syscollector_packages_vuln_content) + self.packages = self.init_package_list(self.syscollector_packages_vuln_content) else: - self.packages = self.init_package_data(self.default_packages_vuln_content) + self.packages = self.init_package_list(self.default_packages_vuln_content) def parse_package_template(self, message, package_data): + """Parse package template with package data. + Args: + message (str): Syscollector event message. + package_data (dict): Package data. + Returns: + str: Parsed syscollector event message. 
+ """ template_package_fields = { '': package_data['description'], '': package_data['architecture'], @@ -821,16 +817,20 @@ def parse_package_template(self, message, package_data): return message def get_package_data(self): + """Get package data. + Returns: + dict: Package data. + str: Operation (INSERTED or DELETED). + """ + operation = 'INSERTED' if not self.packages[self.package_index]['installed'] else 'DELETED' - print(f"Current package operation: {operation}") package_data = self.packages[self.package_index] - self.package_index = (self.package_index + 1) % len(self.packages) return package_data, operation - def init_package_data(self, packages_file): + def init_package_list(self, packages_file): """Get package data from a json file. Returns: dict: Package data. @@ -921,7 +921,6 @@ def get_event_template(self, message_type): if message_type == 'packages': message = self.parse_package_template(message, package_data) - print(f"Current package index: {self.package_index}") self.packages[self.package_index]['installed'] = not self.packages[self.package_index]['installed'] return message From b1a3c22f30c9b88a201edeed4c6b3d32723f1df4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 17 Jan 2024 10:12:54 +0000 Subject: [PATCH 094/174] refac: remove intermediate var --- deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 1b2dfc84c3..490a18e652 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -910,8 +910,7 @@ def get_event_template(self, message_type): message_data = syscollector.SYSCOLLECTOR_HOTFIX_DELTA_DATA_TEMPLATE if message_type == 'packages': - package_data, operation = self.get_package_data() - message_operation = operation + package_data, 
message_operation = self.get_package_data() message = '{"type": "%s", "data": %s, "operation": "%s"}' % ( message_event_type, From c79b414fa29431b02df2d20cc81ed593937bab7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 17 Jan 2024 11:25:01 +0000 Subject: [PATCH 095/174] fix: unmatched package index --- deps/wazuh_testing/setup.py | 1 + deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/deps/wazuh_testing/setup.py b/deps/wazuh_testing/setup.py index f2b2c21098..8ef5064d1b 100644 --- a/deps/wazuh_testing/setup.py +++ b/deps/wazuh_testing/setup.py @@ -30,6 +30,7 @@ 'qa_ctl/provisioning/wazuh_deployment/templates/preloaded_vars.conf.j2', 'data/qactl_conf_validator_schema.json', 'data/all_disabled_ossec.conf', + 'data/syscollector_parsed_packages.json', 'tools/migration_tool/delta_schema.json', 'tools/migration_tool/CVE_JSON_5.0_bundled.json' ] diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index 490a18e652..c1384ca57d 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -822,10 +822,11 @@ def get_package_data(self): dict: Package data. str: Operation (INSERTED or DELETED). 
""" - - operation = 'INSERTED' if not self.packages[self.package_index]['installed'] else 'DELETED' + operation = 'DELETED' if self.packages[self.package_index]['installed'] else 'INSERTED' package_data = self.packages[self.package_index] + + self.packages[self.package_index]['installed'] = not self.packages[self.package_index]['installed'] self.package_index = (self.package_index + 1) % len(self.packages) return package_data, operation @@ -920,7 +921,6 @@ def get_event_template(self, message_type): if message_type == 'packages': message = self.parse_package_template(message, package_data) - self.packages[self.package_index]['installed'] = not self.packages[self.package_index]['installed'] return message From f5c96e2a8873497205990417d6cf5ceb7242c497 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 17 Jan 2024 18:13:15 +0000 Subject: [PATCH 096/174] fix: stabilize E2E Vulnerability tests --- .../wazuh_testing/end_to_end/logs.py | 9 + .../wazuh_testing/end_to_end/monitoring.py | 42 +- .../wazuh_testing/end_to_end/regex.py | 4 +- .../end_to_end/remote_operations_handler.py | 52 +- .../end_to_end/vulnerability_detector.py | 115 ++- .../wazuh_testing/end_to_end/waiters.py | 29 +- .../wazuh_testing/reporting/style.css | 4 + tests/end_to_end/pytest.ini | 1 + .../cases/test_vulnerability.yaml | 748 +++++++++--------- .../configurations/agent.yaml | 2 +- .../test_vulnerability_detector/conftest.py | 86 +- .../test_vulnerability_detector.py | 652 ++++++++------- 12 files changed, 1015 insertions(+), 729 deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/reporting/style.css diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index ebf0f5940a..f566a63b8a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -38,3 +38,12 @@ def truncate_remote_host_group_files(host_manager: HostManager, 
host_group: str, log_file_path = ALERTS_JSON_PATH host_manager.truncate_file(host, log_file_path) + + +def get_hosts_logs(host_manager: HostManager, host_group: str = 'all') -> dict: + host_logs = {} + for host in host_manager.get_group_hosts(host_group): + host_os_name = host_manager.get_host_variables(host)['os_name'] + host_logs[host] = host_manager.get_file_content(host, logs_filepath_os[host_os_name]) + + return host_logs diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 1cbed80e1d..84a8e5e0e5 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -36,7 +36,7 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict host_manager: An instance of the HostManager class containing information about hosts. monitoring_data: A dictionary containing monitoring data for each host. """ - def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 5, + def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 20, ignore_error=False): """ Monitor the specified elements on a host. @@ -50,15 +50,21 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: TimeoutError: If no match is found within the specified timeout. 
""" elements_not_found = [] + elements_found = [] for element in monitoring_elements: - regex, timeout, monitoring_file = element['regex'], element['timeout'], element['file'] + regex, timeout, monitoring_file, n_iterations = element['regex'], element['timeout'], element['file'], \ + element['n_iterations'] current_timeout = 0 regex_match = None + while current_timeout < timeout: file_content = host_manager.get_file_content(host, monitoring_file) - regex_match = re.search(regex, file_content) - if regex_match: + + match_regex = re.findall(regex, file_content) + if match_regex and len(list(match_regex)) >= n_iterations: + elements_found = list(match_regex) + regex_match = True break sleep(scan_interval) @@ -70,10 +76,16 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: if not ignore_error: raise TimeoutError(f"Element not found: {element}") - host_elements_not_found = {} - host_elements_not_found[host] = elements_not_found + monitoring_result = {} + + if host not in monitoring_result: + monitoring_result[host] = {} + + monitoring_result[host]['not_found'] = elements_not_found - return host_elements_not_found + monitoring_result[host]['found'] = elements_found + + return monitoring_result with ThreadPoolExecutor() as executor: futures = [] @@ -91,7 +103,8 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: return results -def generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: list, timeout_list: list) -> dict: +def generate_monitoring_logs(host_manager: HostManager, regex_list: list, timeout_list: list, hosts: list, + n_iterations=1) -> dict: """ Generate monitoring data for logs on all agent hosts. @@ -104,19 +117,21 @@ def generate_monitoring_logs_all_agent(host_manager: HostManager, regex_list: li dict: Monitoring data for logs on all agent hosts. 
""" monitoring_data = {} - for agent in host_manager.get_group_hosts('agent'): + for agent in hosts: monitoring_data[agent] = [] for index, regex_index in enumerate(regex_list): os_name = host_manager.get_host_variables(agent)['os_name'] monitoring_data[agent].append({ 'regex': regex_index, 'file': logs_filepath_os[os_name], - 'timeout': timeout_list[index] + 'timeout': timeout_list[index], + 'n_iterations': n_iterations }) return monitoring_data -def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int) -> dict: +def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int, + n_iterations: int = 1) -> dict: """ Generate monitoring data for logs on a specific manager host. @@ -134,8 +149,10 @@ def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, re monitoring_data[manager] = [{ 'regex': regex, 'file': logs_filepath_os[os_name], - 'timeout': timeout + 'timeout': timeout, + 'n_iterations': n_iterations }] + return monitoring_data @@ -165,6 +182,7 @@ def generate_monitoring_alerts_all_agent(host_manager: HostManager, events_metad 'regex': get_event_regex(event), 'file': '/var/ossec/logs/alerts/alerts.json', 'timeout': 120, + 'n_iterations': 1 } if 'parameters' in metadata_agent: monitoring_element['parameters'] = metadata_agent['parameters'] diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index 36dfba39e1..2ac2e5d1aa 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -22,10 +22,10 @@ REGEX_PATTERNS = { 'syscollector_scan_start': { - 'regex': '.*INFO: Starting evaluation.' + 'regex': r'(\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2}) .*? INFO: Starting evaluation' }, 'syscollector_scan_end': { - 'regex': '.*INFO: Evaluation finished.' + 'regex': r'(\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2}) .*? 
INFO: Evaluation finished' }, 'syscollector_install_package_alert_yum': { 'regex': '.*installed.*agent".*"name":"(\\S+)".*Installed: (\\S+).*?(\\S+)', diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 45980eea99..50ab7353fa 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -24,11 +24,12 @@ from typing import Dict, List from multiprocessing.pool import ThreadPool from datetime import datetime, timezone +import logging from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities -from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events_multihost +from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost from wazuh_testing.end_to_end.waiters import wait_until_vuln_scan_agents_finished from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files @@ -48,18 +49,19 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man Raises: ValueError: If the specified operation is not recognized. 
""" + logging.critical(f"Launching remote operation: {operation_data}") + host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(host)['architecture'] system = host_manager.get_host_variables(host)['os_name'] operation = operation_data['operation'] - - print("Performing remote operations") - if system == 'linux': system = host_manager.get_host_variables(host)['os'].split('_')[0] if operation == 'install_package': + logging.critical(f"Installing package on {host}") + package_data = operation_data['package'] package_url = package_data[host_os_name][host_os_arch] @@ -69,24 +71,33 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man else: host_manager.install_package(host, package_url, system) + logging.critical(f"Package installed on {host}") + logging.critical(f"Waiting for syscollector scan to finish on {host}") + + TIMEOUT_SYSCOLLECTOR_SCAN = 80 - TIMEOUT_SYSCOLLECTOR_SCAN = 60 truncate_remote_host_group_files(host_manager, 'agent', 'logs') # Wait until syscollector - monitoring_data = generate_monitoring_logs_all_agent(host_manager, - [get_event_regex({'event': 'syscollector_scan_start'}), - get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) + monitoring_data = generate_monitoring_logs(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], + host_manager.get_group_hosts('agent')) - monitoring_events_multihost(host_manager, monitoring_data) + result = monitoring_events_multihost(host_manager, monitoring_data) + + logging.critical(f"Syscollector scan finished with result: {result}") truncate_remote_host_group_files(host_manager, 'manager', 'logs') + logging.critical(f"Waiting for vulnerability scan to finish on {host}") + # Wait until VD scan 
wait_until_vuln_scan_agents_finished(host_manager) elif operation == 'remove_package': + logging.critical(f"Removing package on {host}") package_data = operation_data['package'] package_name = package_data[host_os_name][host_os_arch] host_manager.remove_package(host, package_name, system) @@ -95,10 +106,10 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man truncate_remote_host_group_files(host_manager, 'agent', 'logs') # Wait until syscollector - monitoring_data = generate_monitoring_logs_all_agent(host_manager, + monitoring_data = generate_monitoring_logs(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], host_manager.get_group_hosts('agent')) monitoring_events_multihost(host_manager, monitoring_data) @@ -108,11 +119,23 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man wait_until_vuln_scan_agents_finished(host_manager) elif operation == 'check_agent_vulnerability': + logging.critical(f"Checking agent vulnerability on {host}") + + results = { + "alerts_not_found": [], + "states_not_found": [] + } + if operation_data['parameters']['alert_indexed']: - check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data'], current_datetime) + logging.critical(f'Checking vulnerability alerts in the indexer for {host}') + results["alerts_not_found"] = check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data'], current_datetime) if operation_data['parameters']['state_indice']: - check_vuln_state_index(host_manager, operation_data['vulnerability_data'], current_datetime) + logging.critical(f'Checking vulnerability state index for {host}') + results["states_not_found"] = check_vuln_state_index(host_manager, operation_data['vulnerability_data'], current_datetime) + + assert 
len(results["alerts_not_found"]) == 0 and len(results["states_not_found"]) == 0, \ + f"Vulnerability alerts or states not found for {host}: {results}" def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager): @@ -141,6 +164,7 @@ def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager) host_manager (HostManager): An instance of the HostManager class containing information about hosts. """ for task in task_list: + logging.critical(f"Launching parallel task: {task}") parallel_configuration = [] target = task['target'] diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py index 14191b02e0..4d428100bf 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py @@ -2,7 +2,7 @@ from wazuh_testing.end_to_end.indexer_api import get_indexer_values from typing import Dict -from datetime import datetime +import logging import re @@ -21,30 +21,48 @@ def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[s greater_than_timestamp=current_datetime)['hits']['hits'] expected_alerts_not_found = [] + logging.critical(f"Checking vulnerability state index {vulnerability_data}") + for agent in host_manager.get_group_hosts('agent'): + logging.critical(f"Checking vulnerability state index for {agent}") + host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(agent)['architecture'] - if host_os_name in vulnerability_data and host_os_arch in vulnerability_data: - vulnerabilities = vulnerability_data[host_os_name][host_os_arch] - for vulnerability in vulnerabilities: + logging.critical(f"Host OS name: {host_os_name}") + logging.critical(f"Host OS arch: {host_os_arch}") + + if host_os_name in vulnerability_data: + if host_os_arch in 
vulnerability_data[host_os_name]: + logging.critical(f"Inside Host OS arch: {host_os_arch}") + + vulnerabilities = vulnerability_data[host_os_name][host_os_arch] - for indice_vuln in index_vuln_state_content: - state_agent = indice_vuln['agent']['name'] - state_cve = indice_vuln['vulnerability']['enumeration'] - state_package_name = indice_vuln['package']['name'] - state_package_version = indice_vuln['agent']['version'] + logging.critical(f"Vulnerabilities: {vulnerabilities}") + + for vulnerability in vulnerabilities: found = False + for indice_vuln in index_vuln_state_content: + logging.critical(f"Indice vuln: {indice_vuln}") + + state_agent = indice_vuln['_source']['agent']['name'] + state_cve = indice_vuln["_source"]['vulnerability']['id'] + state_package_name = indice_vuln['_source']['package']['name'] + state_package_version = indice_vuln['_source']['package']['version'] + + if state_agent == agent and state_cve == vulnerability['CVE'] \ + and state_package_name == vulnerability['PACKAGE_NAME'] and \ + state_package_version == vulnerability['PACKAGE_VERSION']: + found = True + + if not found: + expected_alerts_not_found.append(vulnerability) - if state_agent == agent and state_cve == vulnerability['CVE'] \ - and state_package_name == vulnerability['PACKAGE_NAME'] and \ - state_package_version == vulnerability['PACKAGE_VERSION']: - found = True + logging.critical(f"Expected alerts not found: {expected_alerts_not_found}") + logging.critical(f"Triggered alerts: {index_vuln_state_content}") - if not found: - expected_alerts_not_found.append(vulnerability) + return expected_alerts_not_found - assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" def get_alerts_by_agent(alerts, regex): @@ -72,6 +90,19 @@ def get_alerts_by_agent(alerts, regex): return alerts_vuln_by_agent +def get_indexed_vulnerabilities_by_agent(indexed_vulnerabilities): + vulnerabilities_by_agent = {} + for vulnerabilities_state in 
indexed_vulnerabilities['hits']['hits']: + if 'agent' in vulnerabilities_state['_source']: + agent = vulnerabilities_state['_source']['agent']['name'] + if agent not in vulnerabilities_by_agent: + vulnerabilities_by_agent[agent] = [] + else: + vulnerabilities_by_agent[agent].append(vulnerabilities_state) + + return vulnerabilities_by_agent + + def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = ''): """ @@ -87,6 +118,8 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict regex_cve_affects = "CVE.* affects .*" regex_solved_vuln = "The .* that affected .* was solved due to a package removal" + logging.critical(f"Checking vulnerability alerts in the indexer {vulnerability_data}") + indexer_alerts = get_indexer_values(host_manager, greater_than_timestamp=current_datetime)['hits']['hits'] # Get CVE affects alerts for all agents @@ -100,21 +133,41 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict triggered_alerts = solved_alerts_by_agent for agent in host_manager.get_group_hosts('agent'): + logging.critical(f"Checking vulnerability alerts for {agent}") host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(agent)['architecture'] + logging.critical(f"Host OS name: {host_os_name}") + logging.critical(f"Host OS arch: {host_os_arch}") + logging.critical(f"Check1: {host_os_arch in vulnerability_data}") + logging.critical(f"Check2: {host_os_name in vulnerability_data}") + + if host_os_name in vulnerability_data: + if host_os_arch in vulnerability_data[host_os_name]: + logging.critical(f"Inside Host OS arch: {host_os_arch}") + vulnerabilities = vulnerability_data[host_os_name][host_os_arch] + for vulnerability in vulnerabilities: + + logging.critical(f"Checking vulnerability: {vulnerability}") + + cve = vulnerability['CVE'] + package = vulnerability['PACKAGE_NAME'] + version = 
vulnerability['PACKAGE_VERSION'] + found = False + for triggered_alert in triggered_alerts[agent]: + alert_package_name = triggered_alert['_source']['data']['vulnerability']['package']["name"] + alert_package_version = \ + triggered_alert['_source']['data']['vulnerability']['package']['version'] + alert_cve = triggered_alert['_source']['data']['vulnerability']['cve'] + + if alert_cve == cve and alert_package_name == package and \ + alert_package_version == version: + found = True + + if not found: + print(f"Vulnerability not found: {vulnerability}") + expected_alerts_not_found.append(vulnerability) + + logging.critical(f"Expected alerts not found: {expected_alerts_not_found}") + logging.critical(f"Triggered alerts: {triggered_alerts}") - if host_os_name in vulnerability_data and host_os_arch in vulnerability_data: - vulnerabilities = vulnerability_data[host_os_name][host_os_arch] - for vulnerability in vulnerabilities: - cve = vulnerability['CVE'] - package = vulnerabilities['PACKAGE'] - version = vulnerabilities['VERSION'] - found = False - for triggered_alert in triggered_alerts[agent]: - if triggered_alert['cve'] == cve and triggered_alert['package'] == package and \ - triggered_alert['version'] == version: - found = True - if not found: - expected_alerts_not_found.append(vulnerability) - - assert len(expected_alerts_not_found) == 0, f"Expected alerts were not found {expected_alerts_not_found}" + return expected_alerts_not_found diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index 722c238725..e4c1802178 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -23,6 +23,8 @@ from wazuh_testing.end_to_end.wazuh_api import get_agents_id from wazuh_testing.tools.system import HostManager +import time + def wait_until_vd_is_updated(host_manager: HostManager) -> None: """ @@ -52,16 +54,17 @@ def 
wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: # The Vulnerability Detector scans are ordered based on the agent ID. # We are currently awaiting completion of all scans globally, # with a timeout set to 5 minutes for each agent. - final_timeout = 300 * len(host_manager.get_group_hosts('agent')) - - for agent in host_manager.get_group_hosts('agent'): - manager_host = host_manager.get_host_variables(agent)['manager'] - agents_id = get_agents_id(host_manager) - agent_id = agents_id.get(agent, '') - finished_scan_pattern = rf"Finished vulnerability assessment for agent '{agent_id}'" - - monitoring_data = generate_monitoring_logs_manager( - host_manager, manager_host, finished_scan_pattern, final_timeout - ) - - monitoring_events_multihost(host_manager, monitoring_data) + final_timeout = 15 * len(host_manager.get_group_hosts('agent')) + time.sleep(final_timeout) + + # for agent in host_manager.get_group_hosts('agent'): + # manager_host = host_manager.get_host_variables(agent)['manager'] + # agents_id = get_agents_id(host_manager) + # agent_id = agents_id.get(agent, '') + # finished_scan_pattern = rf"Finished vulnerability assessment for agent '{agent_id}'" + # + # monitoring_data = generate_monitoring_logs_manager( + # host_manager, manager_host, finished_scan_pattern, final_timeout + # ) + # + # monitoring_events_multihost(host_manager, monitoring_data) diff --git a/deps/wazuh_testing/wazuh_testing/reporting/style.css b/deps/wazuh_testing/wazuh_testing/reporting/style.css new file mode 100644 index 0000000000..b4a4725ca0 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/reporting/style.css @@ -0,0 +1,4 @@ +.col-links a { + display: block +} + diff --git a/tests/end_to_end/pytest.ini b/tests/end_to_end/pytest.ini index 1757f9625e..67092cba57 100644 --- a/tests/end_to_end/pytest.ini +++ b/tests/end_to_end/pytest.ini @@ -8,3 +8,4 @@ log_cli_date_format=%Y-%m-%d %H:%M:%S log_file_level = ERROR log_file_format = %(asctime)s %(message)s 
(%(filename)s:%(lineno)s) log_file_date_format = %Y-%m-%d %H:%M:%S + diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 49b6667af3..20d7cbbab6 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -8,7 +8,8 @@ target: agent package: centos: - amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm + # amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm + amd64: https://nmap.org/dist/nmap-6.46-1.x86_64.rpm arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm ubuntu: amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb @@ -22,14 +23,17 @@ - operation: check_agent_vulnerability target: agent parameters: - alert_indexed: False - state_indice: False + alert_indexed: True + state_indice: True vulnerability_data: centos: amd64: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 + # - PACKAGE_NAME: "grafana" + # PACKAGE_VERSION: "8.5.5" + # CVE: CVE-2023-2183 + - PACKAGE_NAME: "nmap" + PACKAGE_VERSION: "6.46-1" + CVE: CVE-2018-15173 arm64v8: - PACKAGE_NAME: "grafana" PACKAGE_VERSION: "8.5.5" @@ -59,397 +63,397 @@ CVE: CVE-2022-21824 -- case: "Upgrade of a vulnerable package: Remain vulnerable" - id: "upgrade_package_remain_vulnerable" - description: "Upgrade of a vulnerable package: Remain vulnerable" - preconditions: null - depends: "install_package" - body: - tasks: - - operation: install_package - target: agent - package: - centos: - amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb - ubuntu: - amd64: 
https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb - windows: - amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.7-win64.exe - macos: - amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg - arm64v8: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg +# - case: "Upgrade of a vulnerable package: Remain vulnerable" +# id: "upgrade_package_remain_vulnerable" +# description: "Upgrade of a vulnerable package: Remain vulnerable" +# preconditions: null +# depends: "install_package" +# body: +# tasks: +# - operation: install_package +# target: agent +# package: +# centos: +# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb +# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb +# ubuntu: +# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb +# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb +# windows: +# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.7-win64.exe +# macos: +# amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg +# arm64v8: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg - - operation: check_agent_vulnerability - target: agent - parameters: - alert_indexed: False - state_indice: False - vulnerability_data: - centos: - amd64: - - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.16-1PGDG.rhel7" - CVE: CVE-2022-2625 - arm64v8: - - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.15-1PGDG.rhel7" - CVE: CVE-2022-2625 - ubuntu: - amd64: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.6" - CVE: CVE-2023-2183 - arm64v8: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.6" - CVE: CVE-2023-2183 - windows: - amd64: - - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" - PACKAGE_VERSION: "3.0.7" - CVE: CVE-2019-12874 - macos: - amd64: - - PACKAGE_NAME: "node" - 
PACKAGE_VERSION: "17.1.0" - CVE: CVE-2022-21824 - arm64v8: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.1.0" - CVE: CVE-2022-21824 +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# alert_indexed: False +# state_indice: False +# vulnerability_data: +# centos: +# amd64: +# - PACKAGE_NAME: "postgresql11" +# PACKAGE_VERSION: "11.16-1PGDG.rhel7" +# CVE: CVE-2022-2625 +# arm64v8: +# - PACKAGE_NAME: "postgresql11" +# PACKAGE_VERSION: "11.15-1PGDG.rhel7" +# CVE: CVE-2022-2625 +# ubuntu: +# amd64: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "8.5.6" +# CVE: CVE-2023-2183 +# arm64v8: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "8.5.6" +# CVE: CVE-2023-2183 +# windows: +# amd64: +# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" +# PACKAGE_VERSION: "3.0.7" +# CVE: CVE-2019-12874 +# macos: +# amd64: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "17.1.0" +# CVE: CVE-2022-21824 +# arm64v8: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "17.1.0" +# CVE: CVE-2022-21824 -- case: "Upgrade of a vulnerable package: New vulnerability" - id: "upgrade_package_new_vulnerability" - description: "Upgrade of a vulnerable package that become vulnerable to another CVE" - preconditions: null - depends: "upgrade_package_remain_vulnerable" - body: - tasks: - - operation: install_package - target: agent - package: - centos: - amd64: [https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-x86_64/postgresql12-libs-12.13-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-x86_64/postgresql12-12.13-1PGDG.rhel7.x86_64.rpm] - arm64v8: [https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-aarch64/postgresql12-libs-12.13-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-aarch64/postgresql12-12.13-1PGDG.rhel7.aarch64.rpm] - ubuntu: - amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb - arm64v8: 
https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb - windows: - amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.8-win64.exe - macos: - amd64: https://nodejs.org/dist/v17.1.0/node-v18.0.0.pkg - arm64v8: https://nodejs.org/dist/v17.1.0/node-v18.0.0.pkg +# - case: "Upgrade of a vulnerable package: New vulnerability" +# id: "upgrade_package_new_vulnerability" +# description: "Upgrade of a vulnerable package that become vulnerable to another CVE" +# preconditions: null +# depends: "upgrade_package_remain_vulnerable" +# body: +# tasks: +# - operation: install_package +# target: agent +# package: +# centos: +# amd64: [https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-x86_64/postgresql12-libs-12.13-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-x86_64/postgresql12-12.13-1PGDG.rhel7.x86_64.rpm] +# arm64v8: [https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-aarch64/postgresql12-libs-12.13-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-aarch64/postgresql12-12.13-1PGDG.rhel7.aarch64.rpm] +# ubuntu: +# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb +# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb +# windows: +# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.8-win64.exe +# macos: +# amd64: https://nodejs.org/dist/v17.1.0/node-v18.0.0.pkg +# arm64v8: https://nodejs.org/dist/v17.1.0/node-v18.0.0.pkg - - operation: check_agent_vulnerability - target: agent - parameters: - alert_indexed: False - state_indice: False - state: False - vulnerability_data: - centos: - amd64: - - PACKAGE_NAME: "postgresql12" - CVE: CVE-2022-41862 - arm64v8: - - PACKAGE_NAME: "postgresql12" - CVE: CVE-2022-41862 - ubuntu: - amd64: - - PACKAGE_NAME: "grafana" - CVE: CVE-2023-2183 - arm64v8: - - PACKAGE_NAME: "grafana" - CVE: CVE-2023-2183 - windows: - amd64: - - PACKAGE_NAME: 
"C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" - CVE: CVE-2019-12874 - macos: - amd64: - - PACKAGE_NAME: "node" - CVE: CVE-2022-21824 - arm64v8: - - PACKAGE_NAME: "node" - CVE: CVE-2022-21824 +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# alert_indexed: False +# state_indice: False +# state: False +# vulnerability_data: +# centos: +# amd64: +# - PACKAGE_NAME: "postgresql12" +# CVE: CVE-2022-41862 +# arm64v8: +# - PACKAGE_NAME: "postgresql12" +# CVE: CVE-2022-41862 +# ubuntu: +# amd64: +# - PACKAGE_NAME: "grafana" +# CVE: CVE-2023-2183 +# arm64v8: +# - PACKAGE_NAME: "grafana" +# CVE: CVE-2023-2183 +# windows: +# amd64: +# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" +# CVE: CVE-2019-12874 +# macos: +# amd64: +# - PACKAGE_NAME: "node" +# CVE: CVE-2022-21824 +# arm64v8: +# - PACKAGE_NAME: "node" +# CVE: CVE-2022-21824 - - operation: check_agent_vulnerability - target: agent - parameters: - alert_indexed: False - state_indice: False - vulnerability_data: - centos: - amd64: - - PACKAGE_NAME: "postgresql12" - PACKAGE_VERSION: "12.13-1PGDG.rhel7" - CVE: CVE-2022-2625 - arm64v8: - - PACKAGE_NAME: "postgresql12" - PACKAGE_VERSION: "12.13-1PGDG.rhel7" - CVE: CVE-2022-2625 - ubuntu: - amd64: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "9.1.1" - CVE: CVE-2023-1387 - arm64v8: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "9.1.1s" - CVE: CVE-2023-1387 - windows: - amd64: - - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" - PACKAGE_VERSION: "3.0.8" - CVE: CVE-2019-18278 - macos: - amd64: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "18.0.0" - CVE: CVE-2023-38552 +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# alert_indexed: False +# state_indice: False +# vulnerability_data: +# centos: +# amd64: +# - PACKAGE_NAME: "postgresql12" +# PACKAGE_VERSION: "12.13-1PGDG.rhel7" +# CVE: CVE-2022-2625 +# arm64v8: +# - PACKAGE_NAME: "postgresql12" +# PACKAGE_VERSION: "12.13-1PGDG.rhel7" +# 
CVE: CVE-2022-2625 +# ubuntu: +# amd64: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "9.1.1" +# CVE: CVE-2023-1387 +# arm64v8: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "9.1.1s" +# CVE: CVE-2023-1387 +# windows: +# amd64: +# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" +# PACKAGE_VERSION: "3.0.8" +# CVE: CVE-2019-18278 +# macos: +# amd64: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "18.0.0" +# CVE: CVE-2023-38552 - arm64v8: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "18.0.0" - CVE: CVE-2023-38552 - -# -------------- -# -# -- case: "Upgrade of a vulnerable package: Another vuln and maintain original vulnerability" - id: "upgrade_package_new_vulnerability_and_maintain" - description: "Upgrade of a vulnerable package that become vulnerable to another CVE and maintain the previous one" - preconditions: - tasks: - - operation: install_package - target: agent - package: - windows: - amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.10-win64.exe - - operation: check_agent_vulnerability - target: agent - parameters: - alert_indexed: FalseSee - state_indice: False - vulnerability_data: - amd64: - - PACKAGE_NAME: "vlc" - PACKAGE_VERSION: "3.0.10" - CVE: CVE-2023-47360 +# arm64v8: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "18.0.0" +# CVE: CVE-2023-38552 + +# # -------------- +# # +# # +# - case: "Upgrade of a vulnerable package: Another vuln and maintain original vulnerability" +# id: "upgrade_package_new_vulnerability_and_maintain" +# description: "Upgrade of a vulnerable package that become vulnerable to another CVE and maintain the previous one" +# preconditions: +# tasks: +# - operation: install_package +# target: agent +# package: +# windows: +# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.10-win64.exe +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# alert_indexed: FalseSee +# state_indice: False +# vulnerability_data: +# amd64: +# - PACKAGE_NAME: "vlc" +# PACKAGE_VERSION: "3.0.10" +# 
CVE: CVE-2023-47360 - depends: "upgrade_package_remain_vulnerable" - body: - tasks: - - operation: install_package - target: agent - package: - centos: - amd64: [https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64/postgresql13-libs-13.00-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64/postgresql13-13.00-1PGDG.rhel7.x86_64.rpm] - arm64v8: [https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-aarch64/postgresql13-libs-13.00-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-aarch64/postgresql13-13.0-1PGDG.rhel7.aarch64.rpm] - ubuntu: - amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_amd64.deb - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb - windows: - amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.11-win64.exe - macos: - amd64: https://nodejs.org/dist/v17.1.0/node-v18.3.0.pkg - arm64v8: https://nodejs.org/dist/v17.1.0/node-v18.3.0.pkg +# depends: "upgrade_package_remain_vulnerable" +# body: +# tasks: +# - operation: install_package +# target: agent +# package: +# centos: +# amd64: [https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64/postgresql13-libs-13.00-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64/postgresql13-13.00-1PGDG.rhel7.x86_64.rpm] +# arm64v8: [https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-aarch64/postgresql13-libs-13.00-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-aarch64/postgresql13-13.0-1PGDG.rhel7.aarch64.rpm] +# ubuntu: +# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_amd64.deb +# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb +# windows: +# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.11-win64.exe +# macos: +# amd64: https://nodejs.org/dist/v17.1.0/node-v18.3.0.pkg 
+# arm64v8: https://nodejs.org/dist/v17.1.0/node-v18.3.0.pkg - - operation: check_agent_vulnerability - target: agent - parameters: - alert_indexed: False - state_indice: False - vulnerability_data: - centos: - amd64: - - PACKAGE_NAME: "postgresql13" - PACKAGE_VERSION: "13.00-1PGDG.rhel7" - CVE: CVE-2022-2625 - - PACKAGE_NAME: "postgresql13" - PACKAGE_VERSION: "13.00-1PGDG.rhel7" - CVE: CVE-2021-23222 - arm64v8: - - PACKAGE_NAME: "postgresql13" - PACKAGE_VERSION: "13.00-1PGDG.rhel7" - CVE: CVE-2022-2625 - - PACKAGE_NAME: "postgresql13" - PACKAGE_VERSION: "13.00-1PGDG.rhel7" - CVE: CVE-2021-23222 - ubuntu: - amd64: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "9.2.0" - CVE: CVE-2023-1387 - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "9.2.0" - CVE: CVE-2023-22462 - arm64v8: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "9.2.0" - CVE: CVE-2023-1387 - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "9.2.0" - CVE: CVE-2023-22462 - windows: - amd64: - - PACKAGE_NAME: "vlc" - PACKAGE_VERSION: "3.0.11" - CVE: CVE-2023-47360 - - PACKAGE_NAME: "vlc" - PACKAGE_VERSION: "3.0.11" - CVE: CVE-2021-25801 - macos: - amd64: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "18.3.0" - CVE: CVE-2023-38552 - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "18.3.0" - CVE: CVE-2023-32559 - arm64v8: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "18.3.0" - CVE: CVE-2023-38552 - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "18.3.0" - CVE: CVE-2023-32559 +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# alert_indexed: False +# state_indice: False +# vulnerability_data: +# centos: +# amd64: +# - PACKAGE_NAME: "postgresql13" +# PACKAGE_VERSION: "13.00-1PGDG.rhel7" +# CVE: CVE-2022-2625 +# - PACKAGE_NAME: "postgresql13" +# PACKAGE_VERSION: "13.00-1PGDG.rhel7" +# CVE: CVE-2021-23222 +# arm64v8: +# - PACKAGE_NAME: "postgresql13" +# PACKAGE_VERSION: "13.00-1PGDG.rhel7" +# CVE: CVE-2022-2625 +# - PACKAGE_NAME: "postgresql13" +# PACKAGE_VERSION: "13.00-1PGDG.rhel7" +# CVE: CVE-2021-23222 
+# ubuntu: +# amd64: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "9.2.0" +# CVE: CVE-2023-1387 +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "9.2.0" +# CVE: CVE-2023-22462 +# arm64v8: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "9.2.0" +# CVE: CVE-2023-1387 +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "9.2.0" +# CVE: CVE-2023-22462 +# windows: +# amd64: +# - PACKAGE_NAME: "vlc" +# PACKAGE_VERSION: "3.0.11" +# CVE: CVE-2023-47360 +# - PACKAGE_NAME: "vlc" +# PACKAGE_VERSION: "3.0.11" +# CVE: CVE-2021-25801 +# macos: +# amd64: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "18.3.0" +# CVE: CVE-2023-38552 +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "18.3.0" +# CVE: CVE-2023-32559 +# arm64v8: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "18.3.0" +# CVE: CVE-2023-38552 +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "18.3.0" +# CVE: CVE-2023-32559 -# Updating a vulnerable package that ceases to be vulnerable +# # Updating a vulnerable package that ceases to be vulnerable -# Deleting a vulnerable package +# # Deleting a vulnerable package -# Delete non-vulnerable package +# # Delete non-vulnerable package -# Install vulnerable package again and remove it +# # Install vulnerable package again and remove it -- case: "Removal of vulnerable package" - id: "remove_vulnerable_package" - description: "Installation of a vulnerable package" - preconditions: - tasks: - - operation: install_package - target: agent - package: - centos: - amd64: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.15-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.5-1PGDG.rhel7.x86_64.rpm] - arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.15-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.15-2PGDG.rhel7.aarch64.rpm] - ubuntu: - amd64: 
https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb - windows: - amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe - macos: - amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - arm64v8: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg +# - case: "Removal of vulnerable package" +# id: "remove_vulnerable_package" +# description: "Installation of a vulnerable package" +# preconditions: +# tasks: +# - operation: install_package +# target: agent +# package: +# centos: +# amd64: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.15-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.5-1PGDG.rhel7.x86_64.rpm] +# arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.15-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.15-2PGDG.rhel7.aarch64.rpm] +# ubuntu: +# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb +# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb +# windows: +# amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe +# macos: +# amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg +# arm64v8: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - - operation: check_agent_vulnerability - target: agent - parameters: - alert_indexed: False - state_indice: False - vulnerability_data: - centos: - amd64: - - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.15-1PGDG.rhel7" - CVE: CVE-2022-2625 - arm64v8: - - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.15-1PGDG.rhel7" - CVE: CVE-2022-2625 - ubuntu: - amd64: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - arm64v8: - - PACKAGE_NAME: "grafana" - 
PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - windows: - amd64: - - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" - PACKAGE_VERSION: "3.0.6" - CVE: CVE-2019-12874 - macos: - amd64: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: CVE-2022-21824 - arm64v8: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: CVE-2022-21824 - body: - tasks: - - operation: remove_package - target: agent - package: - centos: - amd64: nmap - arm64v8: postgresql11* - ubuntu: - amd64: grafana* - arm64v8: grafana* - windows: - amd64: vlc - macos: - amd64: node* +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# alert_indexed: False +# state_indice: False +# vulnerability_data: +# centos: +# amd64: +# - PACKAGE_NAME: "postgresql11" +# PACKAGE_VERSION: "11.15-1PGDG.rhel7" +# CVE: CVE-2022-2625 +# arm64v8: +# - PACKAGE_NAME: "postgresql11" +# PACKAGE_VERSION: "11.15-1PGDG.rhel7" +# CVE: CVE-2022-2625 +# ubuntu: +# amd64: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "8.5.5" +# CVE: CVE-2023-2183 +# arm64v8: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "8.5.5" +# CVE: CVE-2023-2183 +# windows: +# amd64: +# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" +# PACKAGE_VERSION: "3.0.6" +# CVE: CVE-2019-12874 +# macos: +# amd64: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "17.0.1" +# CVE: CVE-2022-21824 +# arm64v8: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "17.0.1" +# CVE: CVE-2022-21824 +# body: +# tasks: +# - operation: remove_package +# target: agent +# package: +# centos: +# amd64: nmap +# arm64v8: postgresql11* +# ubuntu: +# amd64: grafana* +# arm64v8: grafana* +# windows: +# amd64: vlc +# macos: +# amd64: node* - - operation: check_agent_vulnerability - target: agent - parameters: - alert_indexed: False - state_indice: False - state: False - vulnerability_data: - centos: - amd64: - - PACKAGE_NAME: "postgresql11" - PACKAGE_VERSION: "11.15-1PGDG.rhel7" - CVE: CVE-2022-2625 - arm64v8: - - PACKAGE_NAME: 
"postgresql11" - PACKAGE_VERSION: "11.15-1PGDG.rhel7" - CVE: CVE-2022-2625 - ubuntu: - amd64: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - arm64v8: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - windows: - amd64: - - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" - PACKAGE_VERSION: "3.0.6" - CVE: CVE-2019-12874 - macos: - amd64: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: CVE-2022-21824 - arm64v8: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: CVE-2022-21824 +# - operation: check_agent_vulnerability +# target: agent +# parameters: +# alert_indexed: False +# state_indice: False +# state: False +# vulnerability_data: +# centos: +# amd64: +# - PACKAGE_NAME: "postgresql11" +# PACKAGE_VERSION: "11.15-1PGDG.rhel7" +# CVE: CVE-2022-2625 +# arm64v8: +# - PACKAGE_NAME: "postgresql11" +# PACKAGE_VERSION: "11.15-1PGDG.rhel7" +# CVE: CVE-2022-2625 +# ubuntu: +# amd64: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "8.5.5" +# CVE: CVE-2023-2183 +# arm64v8: +# - PACKAGE_NAME: "grafana" +# PACKAGE_VERSION: "8.5.5" +# CVE: CVE-2023-2183 +# windows: +# amd64: +# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" +# PACKAGE_VERSION: "3.0.6" +# CVE: CVE-2019-12874 +# macos: +# amd64: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "17.0.1" +# CVE: CVE-2022-21824 +# arm64v8: +# - PACKAGE_NAME: "node" +# PACKAGE_VERSION: "17.0.1" +# CVE: CVE-2022-21824 -# ---------------------------------------------------------------------------------- +# # ---------------------------------------------------------------------------------- # - case: "Updating a vulnerable package that remains vulnerable to the same CVE" # id: "update_vuln_package_vuln_remain" diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml index 9be7ff1abb..32edc6424e 100644 --- 
a/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml @@ -21,4 +21,4 @@ - disabled: value: 'no' - interval: - value: '6m' + value: '1m' diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index a1afbca40f..4893a92fef 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -23,6 +23,7 @@ def test_example(host_manager): import json import datetime import os +import logging import shutil import uuid from py.xml import html @@ -30,11 +31,53 @@ def test_example(host_manager): from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations +from wazuh_testing.end_to_end.logs import get_hosts_logs catalog = list() results = dict() + +def collect_evidences(test_name, host_manager, evidences) -> None: + """ + Collect evidences for the test + + Args: + request: Pytest request object + host_manager: An instance of the HostManager class containing information about hosts. 
+ results: An instance of the SyscollectorScansTestsResults class containing the results of the tests + """ + current_dir = os.path.dirname(__file__) + vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") + tests_evidences_directory = os.path.join(str(vulnerability_detector_logs_dir), str(test_name)) + + if evidences: + logging.info(f"Collecting custom evidences for {test_name}") + if test_name in evidences.keys(): + evidences_to_collect = evidences[test_name] + for evidence, content in evidences_to_collect.items(): + if content is not None and content != [] and content != {}: + evidence_file = os.path.join(tests_evidences_directory, evidence + ".log") + with open(evidence_file, 'w') as evidence_file: + if content.__class__ == dict: + try: + json.dump(content, evidence_file, indent=4) + except TypeError: + logging.critical(f"Error dumping {evidence} to file") + pass + else: + evidence_file.write(str(content)) + + logging.info(f"Collecting generic evidences for {test_name}") + environment_logs = get_hosts_logs(host_manager) + + for host in environment_logs.keys(): + host_logs_name_evidence = host + "_ossec.log" + evidence_file = os.path.join(tests_evidences_directory, host_logs_name_evidence) + with open(evidence_file, 'w') as evidence_file: + evidence_file.write(environment_logs[host]) + + @pytest.fixture(scope='session') def host_manager(request): """Fixture for creating a HostManager instance. 
@@ -79,7 +122,7 @@ def handle_logs(): os.makedirs(logs_dir, exist_ok=True) yield - + shutil.rmtree(logs_dir, ignore_errors=True) @@ -87,17 +130,17 @@ def handle_logs(): @pytest.hookimpl(hookwrapper=True, tryfirst=True) def pytest_runtest_setup(item): item_name = item._request.node.name - logs_dir = os.path.join(os.curdir, 'logs') + item_path = item._request.node.path + + logs_dir = os.path.join(os.path.dirname(item_path), 'logs') logs_test_dir = os.path.join(logs_dir, item_name) os.makedirs(logs_test_dir, exist_ok=True) - item_log_path = os.path.join('logs', item_name) - config = item.config logging_plugin = config.pluginmanager.get_plugin("logging-plugin") - filename = os.path.join(item_log_path, item._request.node.name + ".log") + filename = os.path.join(logs_test_dir, item._request.node.name + ".log") logging_plugin.set_log_path(str(filename)) @@ -105,7 +148,6 @@ def pytest_runtest_setup(item): def pytest_html_results_table_header(cells): - print("Using filename as logging path") cells.insert(4, html.th('Tier', class_='sortable tier', col='tier')) cells.insert(3, html.th('Markers')) cells.insert(2, html.th('Description')) @@ -188,21 +230,27 @@ def pytest_runtest_makereport(item, call): extra.append(pytest_html.extras.json(arguments, name="Test arguments")) # Extra files to be added in 'Links' section - logs_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'logs', item._request.node.name) + logs_path = os.path.join(os.path.dirname(item._request.node.path), 'logs', item._request.node.name) - files = [os.path.join(logs_path, f) for f in os.listdir(logs_path) if - os.path.isfile(os.path.join(logs_path, f))] + if 'host_manager' in item.funcargs: + evidences = None + if 'get_results' in item.funcargs: + evidences = item.funcargs['get_results'] - import pdb; pdb.set_trace() + collect_evidences(item._request.node.name, item.funcargs['host_manager'], evidences) + + files = [f for f in os.listdir(logs_path) if + os.path.isfile(os.path.join(logs_path, f))] 
for filepath in files: - if os.path.isfile(filepath): - with open(filepath, mode='r', errors='replace') as f: + fullpath = os.path.join(logs_path, filepath) + if os.path.isfile(fullpath): + with open(fullpath, mode='r', errors='replace') as f: content = f.read() extra.append(pytest_html.extras.text(content, name=os.path.split(filepath)[-1])) - if not report.passed and not report.skipped: - report.extra = extra + # if not report.passed and not report.skipped: + report.extra = extra if report.longrepr is not None and report.longreprtext.split()[-1] == 'XFailed': results[report.location[0]]['xfailed'] += 1 @@ -221,7 +269,7 @@ class td(html.td): style = html.Style(padding='5px', border='1px solid #E6E6E6', text_align='left') class th(html.th): - style = html.Style(padding='5px', border='1px solid #E6E6E6', text_align='left', font_weight='bold') + style = html.Style(padding='5px', border='1px solid #E6E6E6', text_align='left', font_weight='bold', ) def pytest_html_results_summary(prefix, summary, postfix): @@ -244,3 +292,11 @@ def pytest_html_results_summary(prefix, summary, postfix): SummaryTable.td(v['error']), ]) ) for k, v in results.items()])]) + + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config): + if not config.option.css: + current_dir = os.path.dirname(__file__) + config.option.css = [os.path.join(current_dir, + '../../../deps/wazuh_testing/wazuh_testing/reporting/style.css')] diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index d19646aacd..1f66bd71b8 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -53,13 +53,13 @@ from wazuh_testing.end_to_end.configuration import backup_configurations, restore_configuration, configure_environment from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files 
from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated -from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_all_agent, monitoring_events_multihost +from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations -from wazuh_testing.end_to_end.vulnerability_detector import get_alerts_by_agent +from wazuh_testing.end_to_end.vulnerability_detector import get_alerts_by_agent, get_indexed_vulnerabilities_by_agent from wazuh_testing.modules.syscollector import TIMEOUT_SYSCOLLECTOR_SCAN @@ -74,27 +74,118 @@ 'agent': os.path.join(configurations_dir, 'agent.yaml') } vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") +TIMEOUT_PER_AGENT_VULNERABILITY_SCAN = 200 -def collect_evidences(host_manager, test_name, evidences_to_collect): +class SyscollectorScansTestsResults: + """Class to store the results of the Syscollector Scans tests + + Attributes: + checks (dict): Dictionary with the results of the checks + evidences (dict): Dictionary with the evidences of the checks """ - Collect evidences for the test + def __init__(self): + self.checks = { + 'syscollector_first_scan': True, + 'first_scan_all_agents_alerts_detected': True, + 'first_scan_all_agents_alerts_vulnerabilities': True, + 'first_scan_all_agents_index_detected': True, + 'first_scan_all_agents_index_vulnerabilities': True, + 'index_state_consistent': True, + 'syscollector_second_scan': True, + 'vulnerabilities_equal_between_scans': True, + 'vulnerabilities_equal_between_scans_indexer': True + } + + self.vulnerabilities_alerts_first_scan = {} + self.vulnerabilities_alerts_second_scan = {} + 
self.vulnerabilities_index_first_scan = {} + self.vulnerabilities_index_second_scan = {} + + self.timestamp_first_scan = None + self.timestamp_second_scan = None + + self.evidences = { + 'test_syscollector_first_scan': { + 'agents_not_scanned_first_scan': [], + 'testing_evidences_gathering': ["Testing"] + }, + 'test_syscollector_first_scan_alerts': { + 'agents_not_detected_vulnerabilities': [], + 'agents_not_scanned_first_scan': [], + }, + 'test_syscollector_first_scan_index': { + 'agents_not_detected_index_vulnerabilities': [], + 'agents_not_scanned_index': [], + }, + 'tests_syscollector_vulnerabilities_index_alerts_consistency': { + 'alerts_not_in_states': [], + 'states_not_in_alerts': [], + 'alerts_first_scan': [], + 'states_first_scan': [], + }, + 'test_syscollector_second_scan': { + 'agents_not_scanned_second_scan': [], + }, + 'tests_syscollector_first_second_scan_consistency_alerts': { + "vulnerabilities_not_equal_between_scans_alerts": [], + "agents_different_between_scans": [], + }, + 'tests_syscollector_first_second_scan_consistency_index': { + 'vulnerabilities_not_equal_between_scans_indexer': [], + } + } + + def summary(self): + """Print a summary of the results of the tests""" + if any(self.checks.values()): + if not self.checks['first_scan_all_agents_alerts_detected']: + logger.critical("Not all agents generated vulnerabilities. Missing agents:" + f"{self.evidences['agents_not_detected_vulnerabilities']}") + + if not self.checks['first_scan_all_agents_alerts_vulnerabilities']: + logger.critical("Not all agents generated vulnerabilities. Missing agents:" + f"{self.evidences['agents_not_detected_vulnerabilities']}") + + if not self.checks['first_scan_all_agents_index_detected']: + logger.critical("Not all agents were scanned in the index. Missing agents:" + f"{self.evidences['agents_not_scanned_index']}") + + if not self.checks['first_scan_all_agents_index_vulnerabilities']: + logger.critical("Not all agents generated vulnerabilities in the index. 
Missing agents:" + f"{self.evidences['agents_not_detected_index_vulnerabilities']}") + + if not self.checks['index_state_consistent']: + logger.critical("Index state is not consistent with the alerts." + "Inconsistencies can be found in the logs directory") + + if not self.checks['syscollector_second_scan']: + logger.critical("Syscollector scan not started in the following agents:" + f"{self.evidences['agents_syscollector_second_scan_not_started']}") + + if not self.checks['vulnerabilities_equal_between_scans']: + logger.critical("The number of vulnerabilities alerts is not the same between scans." + "Incosistencies can be found in the logs directory") + + if not self.checks['vulnerabilities_equal_between_scans_indexer']: + logger.critical("The number of vulnerabilities in the index is not the same between scans." + "Incosistencies can be found in the logs directory") + + +@pytest.fixture(scope='module') +def results(): + """ + Handle the results of the tests + + Args: + host_manager: An instance of the HostManager class containing information about hosts. 
""" - evidences_directory = os.path.join('..', 'logs', test_name) - print(evidences_directory) + initial_tests_results = SyscollectorScansTestsResults() - for evidence, content in evidences_to_collect.items(): - evidence_file = os.path.join(evidences_directory, evidence + ".log") - with open(evidence_file, 'w') as evidence_file: - if content.__class__ == dict: - try: - json.dump(content, evidence_file, indent=4) - except TypeError: - logger.critical(f"Error dumping {evidence} to file") - pass - else: - evidence_file.write(str(content)) + yield initial_tests_results + + initial_tests_results.summary() def load_vulnerability_detector_configurations(host_manager): @@ -201,7 +292,6 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) alerts_vulnerabilities = [] indices_vulnerabilities = [] - print(vulnerabilities_alerts.__class__) for alert in list(vulnerabilities_alerts.values())[0]: alert_agent = alert['_source']['agent']['name'] alert_cve = alert['_source']['data']['vulnerability']['cve'] @@ -214,7 +304,7 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) 'package_version': alert_package_version }) - for vulnerabilities_state in vulnerabilities_states['hits']['hits']: + for vulnerabilities_state in list(vulnerabilities_states.values())[0]: state_agent = vulnerabilities_state['_source']['agent']['name'] state_cve = vulnerabilities_state['_source']['vulnerability']['id'] state_package_name = vulnerabilities_state['_source']['package']['name'] @@ -232,13 +322,13 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) logger.critical(f"Alerts: {len(alerts_vulnerabilities)}") logger.critical(f"States: {len(indices_vulnerabilities)}") - alerts_not_in_state = [] + alerts_not_in_states = [] states_not_in_alerts = [] # Check that all alerts are in the index for alert in alerts_vulnerabilities: if alert not in indices_vulnerabilities: - alerts_not_in_state.append(alert) + 
alerts_not_in_states.append(alert) # Check that all index states are in the alerts for state in indices_vulnerabilities: @@ -246,270 +336,292 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) states_not_in_alerts.append(state) return { - 'alerts_not_in_state': alerts_not_in_state, + 'alerts_not_in_states': alerts_not_in_states, 'states_not_in_alerts': states_not_in_alerts } -@pytest.mark.dependency() @pytest.mark.filterwarnings('ignore::urllib3.exceptions.InsecureRequestWarning') -def test_syscollector_initial_scans(request, host_manager, setup_vulnerability_tests): - """ - description: Validates the initiation of Syscollector scans across all agents in the environment. - - This test ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. - The Agent's Vulnerability Indexer index is expected to be updated with the detected vulnerabilities. - - tier: 0 - - parameters: - - host_manager: - type: fixture - brief: Get the host manager of the environment - - setup_vulnerability_tests: - type: fixture - brief: Setup the environment to proceed with the testing - - assertions: - - Verify that syscollector scan is started after agent start - - Verify that Vulnerability scan is performed for all the agent - - Verify that vulnerabilities are generated for each agent (Check vulnerabilities using Wazuh API) - - Verify that Agent's Vulnerabilities index is updated and is conssitent with the API results - - Verify that second scan is performed in expected timeframe - - Verify that no new vulnearbilities are detected since the first scan - - cases: None - - tags: - - syscollector - - vulnerability_detector - """ - - tests_results = { - 'checks': { - 'syscollector_first_scan': True, - 'all_agents_scanned_first_scan': True, - 'all_agents_vulnerabilities_detected': True, - 'index_state_consistent': True, - 'syscollector_second_scan': True, - 'vulnerabilities_equal_between_scans': True, - 
'vulnerabilities_equal_between_scans_indexer': True - }, - 'evidences': { - 'agents_not_scanned_first_scan': [], - 'agents_syscollector_scan_not_started': [], - 'agents_syscollector_second_scan_not_started': [], - 'agents_not_detected_vulnerabilities': [], - 'index_state_inconsistencies': {}, - 'vulnerabilities_not_equal_between_scans': [], - 'vulnerabilities_not_equal_between_scans_indexer': [], - 'agents_different_between_scans': [] - } - } - TIMEOUT_AGENTS_VULNERABILITY_SCAN = 200 +class TestInitialScans(): + results = SyscollectorScansTestsResults() - # Monitor for the first Syscollector scan in all the agents - logger.critical("Monitoring Syscollector First Scan") - monitoring_data = generate_monitoring_logs_all_agent(host_manager, - [get_event_regex({'event': 'syscollector_scan_start'}), - get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) + @pytest.fixture(scope='class') + def get_results(self): + return self.results - elements_not_found = monitoring_events_multihost(host_manager, monitoring_data) + def test_syscollector_first_scan(self, host_manager, setup_vulnerability_tests, get_results): + """ + description: Validates the initiation of Syscollector scans across all agents in the environment. - if any(elements_not_found.values()): - tests_results['checks']['syscollector_first_scan'] = False - - for element in elements_not_found: - if elements_not_found[element]: - tests_results['evidences']['agents_syscollector_scan_not_started'].append(element.keys()) - - logging.critical(f"Syscollector scan not started in the following agents:" - f"{tests_results['evidences']['agents_syscollector_scan_not_started']}." 
- 'Continuing with the test') - - # Truncate agents logs to detect second scan - logger.critical("Truncating agent's logs") - truncate_remote_host_group_files(host_manager, 'agent', 'logs') - - # Wait until all agents has been scanned - logger.critical("Waiting until agent's VD scan is over") - # Replace with relevan event. For now timeout - time.sleep(TIMEOUT_AGENTS_VULNERABILITY_SCAN) - - logger.critical("Check agent's vulnerabilities") - alerts_first_scan = get_indexer_values(host_manager, - greater_than_timestamp=setup_vulnerability_tests)['hits']['hits'] - vuln_alerts_by_agent_first_scan = get_alerts_by_agent(alerts_first_scan, 'CVE.*? affects.*"?') - - - logger.critical(f"List of alerts first scan: {vuln_alerts_by_agent_first_scan}") - - - # Check that it has been triggered vulnerability detector alerts - logger.critical("Checking that all agents has been scanned") - for agent in host_manager.get_group_hosts('agent'): - if agent not in vuln_alerts_by_agent_first_scan.keys(): - logger.critical(f"Agent {agent} has not been scanned. Continuing with remaining agents") - tests_results['checks']['all_agents_vulnerabilities_detected'] = False - tests_results['evidences']['agents_not_scanned_first_scan'].append(agent) - - if len(vuln_alerts_by_agent_first_scan[agent]) == 0: - logger.critical(f"Agent {agent} has not generated vulnerabilities. 
Continuing with remaining agents") - tests_results['checks']['all_agents_vulnerabilities_detected'] = False - tests_results['evidences']['agents_not_detected_vulnerabilities'].append(agent) - - # Check vulnerabilities in the index - logger.critical("Checking vulnerabilities in the index") - index_state_first_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', - greater_than_timestamp=setup_vulnerability_tests) - - logger.critical(f"List of indexer indices first scan: {vuln_alerts_by_agent_first_scan}") - - # Check that the index is consistent with the alerts - logging.critical("Checking index state consistency") - tests_results['evidences']['index_state_inconsistencies'] = \ - check_vuln_state_consistency(vuln_alerts_by_agent_first_scan, - index_state_first_scan) - - if tests_results['evidences']['index_state_inconsistencies']['alerts_not_in_state'] or \ - tests_results['evidences']['index_state_inconsistencies']['states_not_in_alerts']: - logger.critical("Index state is not consistent with the alerts") - tests_results['checks']['index_state_consistent'] = False - - # Truncate manager_logs to prevent trigger wait_until_vuln_scan_agents_finished wrongly - logger.critical("Truncating manager logs") - truncate_remote_host_group_files(host_manager, 'manager', 'logs') - - logger.critical("Monitoring Second Syscollector scan") - # The Agent's syscollector second scan is run - monitoring_data = generate_monitoring_logs_all_agent(host_manager, - [get_event_regex({'event': 'syscollector_scan_start'}), - get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN]) - - elements_not_found = monitoring_events_multihost(host_manager, monitoring_data) - - logger.critical("Checking that all agents has been scanned") - if any(elements_not_found.values()): - tests_results['checks']['syscollector_second_scan'] = False - - for element in elements_not_found: - if elements_not_found[element]: - 
tests_results['evidences']['agents_syscollector_second_scan_not_started'].append(element.keys()) - - logging.critical(f"Syscollector scan not started in the following agents:" - f"{tests_results['evidences']['agents_syscollector_second_scan_not_started']}." - 'Continuing with the test') - - logger.critical("Waiting until agent's VD scan is over") - time.sleep(60) - - logger.critical("Checking vulnerabilities in the second scan") - alerts_second_scan = get_indexer_values(host_manager, - greater_than_timestamp=setup_vulnerability_tests)['hits']['hits'] - - vuln_alerts_by_agent_second_scan = get_alerts_by_agent(alerts_second_scan, 'CVE.*? affects.*"?') - - alert_present_in_first_scan_not_in_second_scan = [] - alert_present_in_second_scan_not_in_second_scan = [] - - if len(vuln_alerts_by_agent_second_scan) != len(vuln_alerts_by_agent_first_scan): - tests_results['checks']['vulnerabilities_equal_between_scans'] = False - logger.critical("The number of vulnerabilities is not the same between scans") - logger.critical(f"First scan: {len(vuln_alerts_by_agent_first_scan)}") - logger.critical(f"Second scan: {len(vuln_alerts_by_agent_second_scan)}") - - logger.critical("Checking that all agents has been scanned") - # Check if the number of agents for each scan is the same - if list(vuln_alerts_by_agent_first_scan.keys()) != list(vuln_alerts_by_agent_second_scan.keys()): - tests_results['checks']['vulnerabilities_equal_between_scans'] = False - logging.critical(f"Agents with vulnerabilities changed between scans: " - f"First scan: {list(vuln_alerts_by_agent_first_scan.keys())}" - f"Second scan: {list(vuln_alerts_by_agent_second_scan.keys())}") - tests_results['evidences']['agents_different_between_scans'] = \ - list(set(list(vuln_alerts_by_agent_first_scan.keys())) ^ set(list(vuln_alerts_by_agent_second_scan.keys()))) - - logger.critical("Checking that all agents has been scanned") - # Check if the number of vulnerabilities for each agent is the same - for agent in 
vuln_alerts_by_agent_second_scan.keys(): - for alert in list(vuln_alerts_by_agent_second_scan[agent][0]): - alert_present_in_second_scan_not_in_second_scan.append(alert) - - for alert in list(vuln_alerts_by_agent_first_scan[agent][0]): - if alert in alert_present_in_first_scan_not_in_second_scan: - alert_present_in_first_scan_not_in_second_scan.remove(alert) - - logger.critical("Checking that all agents has been scanned") - if alert_present_in_first_scan_not_in_second_scan or alert_present_in_second_scan_not_in_second_scan: - tests_results['checks']['vulnerabilities_equal_between_scans'] = False - tests_results['evidences']['vulnerabilities_not_equal_between_scans'] = { - 'alert_present_in_first_scan_not_in_second_scan': alert_present_in_first_scan_not_in_second_scan, - 'alert_present_in_second_scan_not_in_second_scan': alert_present_in_second_scan_not_in_second_scan - } - - logger.critical("Checking vulnerabilities in the second scan") - index_state_second_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', - greater_than_timestamp=setup_vulnerability_tests) - logger.critical(f"List of indices second scan: {vuln_alerts_by_agent_second_scan}") + This test ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. + The Agent's Vulnerability Indexer index is expected to be updated with the detected vulnerabilities. 
- if index_state_second_scan != index_state_first_scan: - tests_results['checks']['vulnerabilities_equal_between_scans_indexer'] = False - tests_results['evidences']['vulnerabilities_not_equal_between_scans_indexer'] = { - 'index_state_first_scan': index_state_first_scan, - 'index_state_second_scan': index_state_second_scan - } + tier: 0 + + parameters: + - host_manager: + type: fixture + brief: Get the host manager of the environment + - setup_vulnerability_tests: + type: fixture + brief: Setup the environment to proceed with the testing + + assertions: + - Verify that syscollector scan is started after agent start + - Verify that Vulnerability scan is performed for all the agent + - Verify that vulnerabilities are generated for each agent (Check vulnerabilities using Wazuh API) + - Verify that Agent's Vulnerabilities index is updated and is consistent with the API results + - Verify that second scan is performed in expected timeframe + - Verify that no new vulnerabilities are detected since the first scan + + cases: None + + tags: + - syscollector + - vulnerability_detector + """ + test_fail_message = "" + results = get_results + test_name = 'test_syscollector_first_scan' + + logger.critical("Monitoring Syscollector First Scan") + list_hosts = host_manager.get_group_hosts('agent') + monitoring_data = generate_monitoring_logs(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], + list_hosts) + + monitoring_results = monitoring_events_multihost(host_manager, monitoring_data) + + logger.critical(f"Value of monitoring results is: {monitoring_results}") + + for agent in monitoring_results: + if monitoring_results[agent]['not_found']: + results.checks['syscollector_first_scan'] = False + results.evidences[test_name]['agents_not_scanned_first_scan'].append(agent) + test_fail_message += "Syscollector scan not started in the following 
agents:" \ + f"{results.evidences['agents_not_scanned_first_scan']}. " \ + 'Continuing with the test' + + if not results.checks['syscollector_first_scan']: + pytest.fail(test_fail_message) + + def test_syscollector_first_scan_alerts(self, host_manager, setup_vulnerability_tests, get_results): + results = get_results + test_name = 'test_syscollector_first_scan_alerts' + + agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] + if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + pytest.skip("Syscollector scan not started in any agent. Skipping test") + + # Wait until all agents has been scanned + logger.critical("Waiting until agent's VD scan is over") + # Replace with relevan event. For now timeout + time.sleep(TIMEOUT_PER_AGENT_VULNERABILITY_SCAN) + + logger.critical("Check agent's vulnerabilities") + alerts_first_scan = get_indexer_values(host_manager, + greater_than_timestamp=setup_vulnerability_tests)['hits']['hits'] + vuln_alerts_by_agent_first_scan = get_alerts_by_agent(alerts_first_scan, 'CVE.*? affects.*"?') + + results.vulnerabilities_alerts_first_scan = vuln_alerts_by_agent_first_scan + + # Check that it has been triggered vulnerability detector alerts + logger.critical("Checking that all agents has been scanned") + for agent in agents_to_check: + if agent not in list(vuln_alerts_by_agent_first_scan.keys()): + logger.critical(f"Agent {agent} has not been scanned. Continuing with remaining agents") + results.checks['first_scan_all_agents_alerts_detected'] = False + results[test_name]['agents_not_scanned_first_scan'].append(agent) + + if len(vuln_alerts_by_agent_first_scan[agent]) == 0: + logger.critical(f"Agent {agent} has not generated vulnerabilities. 
Continuing with remaining agents") + results.checks['first_scan_all_agents_alerts_vulnerabilities'] = False + results[test_name]['agents_not_detected_vulnerabilities'].append(agent) + + if not results.checks['first_scan_all_agents_alerts_detected'] or not \ + results.checks['first_scan_all_agents_alerts_vulnerabilities']: + pytest.fail("Test failed. Check logs for more information") + + + def test_syscollector_first_scan_index(self, host_manager, setup_vulnerability_tests, get_results): + results = get_results + test_name = 'test_syscollector_first_scan_index' + + agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] + if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + pytest.skip("Syscollector scan not started in any agent. Skipping test") + + # Check vulnerabilities in the index + logger.critical("Checking vulnerabilities in the index") + index_state_first_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', + greater_than_timestamp=setup_vulnerability_tests) + index_vulnerabilities_by_agent_first_scan = get_indexed_vulnerabilities_by_agent(index_state_first_scan) + + results.vulnerabilities_index_first_scan = index_vulnerabilities_by_agent_first_scan + + logger.critical("Checking that all agents has been scanned and generated vulnerabilities in the index") + for agent in agents_to_check: + if agent not in list(index_vulnerabilities_by_agent_first_scan.keys()): + logger.critical(f"Agent {agent} has not been scanned. Continuing with remaining agents") + results.checks['first_scan_all_agents_index_detected'] = False + results[test_name]['agents_not_scanned_index'].append(agent) + + if len(index_vulnerabilities_by_agent_first_scan[agent]) == 0: + logger.critical(f"Agent {agent} has not generated vulnerabilities. 
Continuing with remaining agents") + results.checks['first_scan_all_agents_index_vulnerabilities'] = False + results[test_name]['agets_not_detected_index_vulnerabilities'].append(agent) + + if not results.checks['first_scan_all_agents_index_detected'] or not \ + results.checks['first_scan_all_agents_index_vulnerabilities']: + pytest.fail("Test failed. Check logs for more information") + + def tests_syscollector_vulnerabilities_index_alerts_consistency(self, host_manager, + setup_vulnerability_tests, get_results): + results = get_results + test_name = 'tests_syscollector_vulnerabilities_index_alerts_consistency' + + agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] + if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + pytest.skip("Syscollector scan not started in any agent. Skipping test") + + # Check that the index is consistent with the alerts + logging.critical("Checking index state consistency") + inconsistencies_between_alerts_indexer = \ + check_vuln_state_consistency(results.vulnerabilities_alerts_first_scan, + results.vulnerabilities_index_first_scan) + + results.evidences[test_name]['alerts_not_in_states'] = \ + inconsistencies_between_alerts_indexer['alerts_not_in_states'] + results.evidences[test_name]['states_not_in_alerts'] = \ + inconsistencies_between_alerts_indexer['states_not_in_alerts'] + + if len(results.evidences[test_name]['alerts_not_in_states']) > 0 or \ + len(results.evidences[test_name]['states_not_in_alerts']) > 0: + logger.critical("Index state is not consistent with the alerts") + results.checks['index_state_consistent'] = False + + results.evidences[test_name]['alerts_first_scan'] = results.vulnerabilities_alerts_first_scan + results.evidences[test_name]['states_first_scan'] = results.vulnerabilities_index_first_scan + + if not results.checks['index_state_consistent']: + pytest.fail("Test failed. 
Check logs for more information") + + def test_syscollector_second_scan(self, host_manager, setup_vulnerability_tests, get_results): + results = get_results + test_name = 'test_syscollector_second_scan' + + agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] + if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + pytest.skip("Syscollector scan not started in any agent. Skipping test") + + monitoring_data = generate_monitoring_logs(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], + agents_to_check, 2) + + monitoring_results = monitoring_events_multihost(host_manager, monitoring_data) + + logger.critical("Checking that all agents has been scanned") + + for agent in monitoring_results: + if monitoring_results[agent]['not_found']: + results.checks['syscollector_second_scan'] = False + results.evidences[test_name]['agents_syscollector_second_scan_not_started'].append(agent) + + logging.critical(f"Syscollector scan not started in the following agents:" + f"{results.evidences['agents_syscollector_second_scan_not_started']}." + 'Continuing with the test') - test_result = all(tests_results['checks'].values()) + if not results.checks['syscollector_second_scan']: + logger.critical("Syscollector scan not started in the following agents:" + f"{results.evidences['agents_syscollector_second_scan_not_started']}.") + + pytest.fail("Test failed. 
Check logs for more information") + + def tests_syscollector_first_second_scan_consistency_alerts(self, host_manager, setup_vulnerability_tests, + get_results): + results = get_results + test_name = 'tests_syscollector_first_second_scan_consistency_alerts' + + agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] + if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + pytest.skip("Syscollector scan not started in any agent. Skipping test") + + logger.critical("Waiting until agent's VD scan is over") + time.sleep(60) + logger.critical("Checking vulnerabilities in the second scan") + + alerts_second_scan = get_indexer_values(host_manager, + greater_than_timestamp=setup_vulnerability_tests)['hits']['hits'] + vuln_alerts_by_agent_second_scan = get_alerts_by_agent(alerts_second_scan, 'CVE.*? affects.*"?') + results.vulnerabilities_alerts_second_scan = vuln_alerts_by_agent_second_scan + + alert_present_in_first_scan_not_in_second_scan = [] + alert_present_in_second_scan_not_in_second_scan = [] + + if len(vuln_alerts_by_agent_second_scan) != len(results.vulnerabilities_alerts_first_scan): + results.checks['vulnerabilities_equal_between_scans'] = False + logger.critical(f"First scan: {len(results.vulnerabilities_alerts_first_scan)}") + logger.critical(f"Second scan: {len(vuln_alerts_by_agent_second_scan)}") + + logger.critical("Checking that all agents has been scanned") + # Check if the number of agents for each scan is the same + if list(results.vulnerabilities_alerts_first_scan.keys()) != list(vuln_alerts_by_agent_second_scan.keys()): + results.checks['vulnerabilities_equal_between_scans'] = False + logging.critical(f"Agents with vulnerabilities changed between scans: " + f"First scan: {list(results.vulnerabilities_alerts_first_scan.keys())}" + f"Second scan: {list(vuln_alerts_by_agent_second_scan.keys())}") + results[test_name]['evidences']['agents_different_between_scans'] = \ + 
list(set(list(results.vulnerabilities_alerts_first_scan.keys())) ^ set(list(vuln_alerts_by_agent_second_scan.keys()))) + + logger.critical("Checking that all agents has been scanned") + # Check if the number of vulnerabilities for each agent is the same + for agent in agents_to_check: + for alert in list(vuln_alerts_by_agent_second_scan[agent][0]): + alert_present_in_second_scan_not_in_second_scan.append(alert) + + for alert in list(results.vulnerabilities_alerts_first_scan[agent][0]): + if alert in alert_present_in_first_scan_not_in_second_scan: + alert_present_in_first_scan_not_in_second_scan.remove(alert) + + logger.critical("Checking that all agents has been scanned") + if alert_present_in_first_scan_not_in_second_scan or alert_present_in_second_scan_not_in_second_scan: + results.checks['vulnerabilities_equal_between_scans'] = False + results.evidences[test_name]['vulnerabilities_not_equal_between_scans_alerts'] = { + 'alert_present_in_first_scan_not_in_second_scan': alert_present_in_first_scan_not_in_second_scan, + 'alert_present_in_second_scan_not_in_second_scan': alert_present_in_second_scan_not_in_second_scan + } + if not results.checks['vulnerabilities_equal_between_scans']: + pytest.fail("Test failed. Check logs for more information") - if not test_result: - logger.critical("Test failed. Test results:") - if not tests_results['checks']['syscollector_first_scan']: - logger.critical("Syscollector scan not started in the following agents:" - f"{tests_results['evidences']['agents_syscollector_scan_not_started']}") + def tests_syscollector_first_second_scan_consistency_index(self, host_manager, setup_vulnerability_tests, + get_results): + results = get_results + test_name = 'tests_syscollector_first_second_scan_consistency_index' - if not tests_results['checks']['all_agents_scanned_first_scan']: - logger.critical("Not all agents were scanned in the first scan. 
Missing agents:" - f"{tests_results['evidences']['agents_not_scanned_first_scan']}") + agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] + if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + pytest.skip("Syscollector scan not started in any agent. Skipping test") - if not tests_results['checks']['all_agents_vulnerabilities_detected']: - logger.critical("Not all agents generated vulnerabilities. Missing agents:" - f"{tests_results['evidences']['agents_not_detected_vulnerabilities']}") + logger.critical("Checking vulnerabilities in the second scan") + index_state_second_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', + greater_than_timestamp=setup_vulnerability_tests) + index_vulnerabilities_by_agent_second_scan = get_indexed_vulnerabilities_by_agent(index_state_second_scan) - if not tests_results['checks']['index_state_consistent']: - logger.critical("Index state is not consistent with the alerts. Inconsistencies:" - f"{tests_results['evidences']['index_state_inconsistencies']}") + results.vulnerabilities_index_second_scan = index_vulnerabilities_by_agent_second_scan - if not tests_results['checks']['syscollector_second_scan']: - logger.critical("Syscollector scan not started in the following agents:" - f"{tests_results['evidences']['agents_syscollector_second_scan_not_started']}") - - if not tests_results['checks']['vulnerabilities_equal_between_scans']: - logger.critical("The number of vulnerabilities is not the same between scans. Inconsistencies:" - f"{tests_results['evidences']['vulnerabilities_not_equal_between_scans']}") - - if not tests_results['checks']['vulnerabilities_equal_between_scans_indexer']: - logger.critical("The number of vulnerabilities is not the same between scans. 
Inconsistencies:" - f"{tests_results['evidences']['vulnerabilities_not_equal_between_scans_indexer']}") - - logger.critical("Gathering evidences") - - evidences_to_collect = { - "alerts_first_scan": vuln_alerts_by_agent_first_scan, - "alerts_second_scan": vuln_alerts_by_agent_second_scan, - "index_state_first_scan": index_state_first_scan, - "index_state_second_scan": index_state_second_scan, - "index_alerts_inconsistences": tests_results['evidences']['index_state_inconsistencies'], - "differences_alerts_between_scans": - tests_results['evidences']['vulnerabilities_not_equal_between_scans'], - "differences_index_between_scans": - tests_results['evidences']['vulnerabilities_not_equal_between_scans_indexer'] - } + differences = list(set(results.vulnerabilities_index_first_scan).symmetric_difference( + set(results.vulnerabilities_index_second_scan))) - collect_evidences(host_manager, request.node.name, evidences_to_collect) + if results.vulnerabilities_index_first_scan != results.vulnerabilities_index_second_scan: + results.checks['vulnerabilities_equal_between_scans_indexer'] = False + results[test_name]['evidences']['vulnerabilities_not_equal_between_scans_indexer'] = differences - pytest.fail("Test failed. 
Check logs for more information") + logger.critical("The number of vulnerabilities is not the same between scans") # ------------------------- @@ -534,9 +646,11 @@ def test_syscollector_initial_scans(request, host_manager, setup_vulnerability_t @pytest.mark.dependency() -# @pytest.mark.dependency(depends=["test_syscollector_second_scan"]) @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) -def test_vulnerability_detector_scans(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager): +#def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager): +def test_vulnerability_detector_scans_cases(preconditions, body, teardown, setup, host_manager): + logger.critical("Starting scan cases tests") + logger.critical(f"Case Info: {body}") # Launch tests tasks launch_parallel_operations(body['tasks'], host_manager) From cbc40532a096a0fd007eb8f09fc3dff906b29e45 Mon Sep 17 00:00:00 2001 From: Matias Pereyra Date: Thu, 18 Jan 2024 02:04:42 +0000 Subject: [PATCH 097/174] refactor: Removing references to the legacy vuln-det module --- .../data/agent/agent_messages.yaml | 406 ++++-------------- .../data/global/wazuh_db_backup_command.yaml | 133 +++--- .../data/wazuh_db_backups_conf.yaml | 56 +-- .../test_agent_database_version.py | 4 +- .../test_wazuh_db/test_wazuh_db.py | 6 +- 5 files changed, 186 insertions(+), 419 deletions(-) diff --git a/tests/integration/test_wazuh_db/data/agent/agent_messages.yaml b/tests/integration/test_wazuh_db/data/agent/agent_messages.yaml index e76e1ad0ac..da48875c70 100644 --- a/tests/integration/test_wazuh_db/data/agent/agent_messages.yaml +++ b/tests/integration/test_wazuh_db/data/agent/agent_messages.yaml @@ -1,326 +1,92 @@ ---- - - name: "Agents' CVEs table: vuln_cves" - description: "Checks the commands insert and clear" + name: Not existing agent + description: Check messages from not registered agents. 
test_case: - - - input: 'agent 000 vuln_cves insert {"name":"test_name", - "version":"1.0", - "architecture":"x64", - "cve":"CVE-2021-0001", - "reference":"03c06c4f118618400772367b1cf7e73ce0178e02", - "type":"PACKAGE", - "status":"VALID", - "check_pkg_existence":true, - "severity":null, - "cvss2_score":0, - "cvss3_score":0}' - output: 'ok {"action":"INSERT","status":"SUCCESS"}' - stage: "agent vuln_cves insert package when it does not exist in sys_programs" - - - input: 'agent 000 vuln_cves insert {"name":"test_package", - "version":"1.0", - "architecture":"x86", - "cve":"CVE-2021-1001", - "reference":"03c06c4f118618400772367b1cf7e73ce0178e02", - "type":"PACKAGE", - "status":"VALID", - "check_pkg_existence":false, - "severity":"Medium", - "cvss2_score":5.0, - "cvss3_score":6.1}' - output: 'ok {"action":"INSERT","status":"SUCCESS"}' - stage: "agent vuln_cves insert test_package without checking if the package is present in sys_programs" - - - input: 'agent 000 sql SELECT * FROM vuln_cves where cve="CVE-2021-1001"' - output: 'ok [{"name":"test_package","version":"1.0","architecture":"x86","cve":"CVE-2021-1001","detection_time":"*","severity":"Medium","cvss2_score":5,"cvss3_score":6.1,"reference":"03c06c4f118618400772367b1cf7e73ce0178e02","type":"PACKAGE","status":"VALID"}]' - stage: "agent vuln_cves checking test_package" - use_regex: "yes" - - - input: 'agent 000 vuln_cves insert {"name":"test_package", - "version":"1.0", - "architecture":"x86", - "cve":"CVE-2021-1001", - "reference":"03c06c4f118618400772367b1cf7e73ce0178e02", - "type":"PACKAGE", - "status":"VALID", - "check_pkg_existence":false, - "severity":"Medium", - "cvss2_score":5.0, - "cvss3_score":6.1}' - output: 'ok {"action":"UPDATE","status":"SUCCESS"}' - stage: "agent vuln_cves update already inserted entry" - - - input: 'agent 000 sql INSERT INTO sys_programs 
(scan_id,scan_time,format,name,priority,section,size,vendor,install_time,version,architecture,multiarch,source,description,location,triaged,cpe,msu_name,checksum,item_id) - VALUES(0,"2021/04/07 22:00:00","deb","test package","optional","utils","7490","Wazuh wazuh@wazuh.com",NULL,"1.0.0","amd64",NULL,NULL,"Test package",NULL,0,NULL,NULL,"e7dbc9bba5a0ee252866536225b952d3de7ea5cb","777fef8cc434b597769d102361af718d29ef72c1")' - output: 'ok []' - stage: "agent vuln_cves adding dummy test package to sys_programs" - - - input: 'agent 000 sql SELECT * FROM sys_programs WHERE name = "test package"' - output: 'ok [{"scan_id":0,"scan_time":"2021/04/07 22:00:00","format":"deb","name":"test package","priority":"optional","section":"utils","size":7490,"vendor":"Wazuh wazuh@wazuh.com","version":"1.0.0","architecture":"amd64","description":"Test package","triaged":0,"checksum":"e7dbc9bba5a0ee252866536225b952d3de7ea5cb","item_id":"777fef8cc434b597769d102361af718d29ef72c1"}]' - stage: "agent vuln_cves checking test package in sys_programs" - - - input: 'agent 000 vuln_cves insert {"name":"test package", - "version":"1.0", - "architecture":"x86", - "cve":"CVE-2021-1002", - "reference":"777fef8cc434b597769d102361af718d29ef72c1", - "type":"OS", - "status":"PENDING", - "check_pkg_existence":true, - "severity":null, - "cvss2_score":0, - "cvss3_score":0}' - output: 'ok {"action":"INSERT","status":"SUCCESS"}' - stage: "agent vuln_cves insert with spaces in json payload and the test package exist in sys_programs" - - - input: 'agent 000 sql SELECT * FROM vuln_cves WHERE name = "test package"' - output: 'ok [{"name":"test package","version":"1.0","architecture":"x86","cve":"CVE-2021-1002","detection_time":"*","cvss2_score":0,"cvss3_score":0,"reference":"777fef8cc434b597769d102361af718d29ef72c1","type":"OS","status":"PENDING"}]' - stage: "agent vuln_cves checking test package" - use_regex: "yes" - - - input: 'agent 000 vuln_cves insert {"name":"test_package","cve":"CVE-2021-1001"}' - output: 
"err Invalid JSON data, missing required fields" - stage: "agent vuln_cves insert incomplete package" - - - input: 'agent 000 vuln_cves insert {"name":"test_package",' - output: "err Invalid JSON syntax, near '{\"name\":\"test_package\",'" - stage: "agent vuln_cves insert invalid JSON" - - - input: 'agent 000 vuln_cves' - output: "err Invalid vuln_cves query syntax, near 'vuln_cves'" - stage: "agent vuln_cves missing command" - - - input: 'agent 000 vuln_cves insert' - output: "err Invalid JSON syntax, near ''" - stage: "agent vuln_cves missing payload" - - - input: 'agent 000 vuln_cves insert {"name":"test_package2", - "version":"3.0", - "architecture":"x86", - "cve":"CVE-2021-1001", - "reference":"99efe684b5ff4646b3c754de46cb6a9cbee9fbaa", - "type":"PACKAGE", - "status":"VALID", - "check_pkg_existence":false, - "severity":"Untriaged", - "cvss2_score":0, - "cvss3_score":0}' - output: 'ok {"action":"INSERT","status":"SUCCESS"}' - stage: "agent vuln_cves insert package with same CVE without checking if the package is present in sys_programs" - - - input: 'agent 000 sql SELECT * FROM vuln_cves WHERE name = "test_package2"' - output: 'ok []' - stage: "agent vuln_cves checking package insertion with same CVE" - use_regex: "yes" - - - input: 'agent 000 vuln_cves insert {"name":"test_package2", - "version":"3.0", - "architecture":"x86", - "cve":"CVE-2021-1002", - "reference":"99efe684b5ff4646b3c754de46cb6a9cbee9fbaa", - "type":"PACKAGE", - "status":"VALID", - "check_pkg_existence":false, - "severity":"High", - "cvss2_score":8.2, - "cvss3_score":9.35}' - output: 'ok {"action":"INSERT","status":"SUCCESS"}' - stage: "agent vuln_cves insert same package with different CVE without checking if the package is present in sys_programs" - - - input: 'agent 000 sql SELECT * FROM vuln_cves WHERE name = "test_package2" AND cve = "CVE-2021-1002"' - output: 'ok 
[{"name":"test_package2","version":"3.0","architecture":"x86","cve":"CVE-2021-1002","detection_time":"*","severity":"High","cvss2_score":8.2,"cvss3_score":9.35,"reference":"99efe684b5ff4646b3c754de46cb6a9cbee9fbaa","type":"PACKAGE","status":"VALID"}]' - stage: "agent vuln_cves checking package with different CVE" - use_regex: "yes" - - - input: 'agent 000 vuln_cves update_status {"old_status":"PENDING", - "new_status":"OBSOLETE"}' - output: 'ok' - stage: "agent vuln_cves update specific status to another one" - - - input: 'agent 000 sql SELECT distinct status FROM vuln_cves' - output: 'ok [{"status":"OBSOLETE"},{"status":"VALID"}]' - stage: 'agent vuln_cves checking change specific status by another one' - - - input: 'agent 000 vuln_cves update_status {"new_status":"PENDING", - "type":"OS"}' - output: 'ok' - stage: 'agent vuln_cves update status by type "OS"' - - - input: 'agent 000 sql SELECT count(status) FROM vuln_cves WHERE type = "OS"' - output: 'ok [{"count(status)":1}]' - stage: 'agent vuln_cves checking update status by type "OS"' - - - input: 'agent 000 vuln_cves update_status {"new_status":"PENDING", - "type":"PACKAGE"}' - output: 'ok' - stage: 'agent vuln_cves update status by type "PACKAGE"' - - - input: 'agent 000 sql SELECT count(status) FROM vuln_cves WHERE type = "PACKAGE"' - output: 'ok [{"count(status)":4}]' - stage: 'agent vuln_cves checking update status by type "PACKAGE"' - - - input: 'agent 000 vuln_cves update_status {"new_status":"VALID", - "type":"PACKAGE"}' - output: 'ok' - stage: 'agent vuln_cves update status by type "PACKAGE" again' - - - input: 'agent 000 sql SELECT count(status) FROM vuln_cves WHERE type = "PACKAGE"' - output: 'ok [{"count(status)":4}]' - stage: 'agent vuln_cves checking update status by type "PACKAGE" again' - - - input: 'agent 000 vuln_cves remove {"status":"PENDING"}' - output: 'ok [{"name":"test 
package","version":"1.0","architecture":"x86","cve":"CVE-2021-1002","detection_time":"*","cvss2_score":0,"cvss3_score":0,"reference":"777fef8cc434b597769d102361af718d29ef72c1","type":"OS","status":"PENDING"}]' - stage: 'agent vuln_cves remove by status' - use_regex: "yes" - - - input: 'agent 000 sql SELECT distinct status FROM vuln_cves' - output: 'ok [{"status":"VALID"}]' - stage: 'agent vuln_cves checking remove by status' - - - input: 'agent 000 vuln_cves insert {"name":"test package", - "version":"1.0", - "architecture":"x86", - "cve":"CVE-2021-1002", - "reference":"777fef8cc434b597769d102361af718d29ef72c1", - "type":"OS", - "status":"PENDING", - "check_pkg_existence":true, - "severity":"Low", - "cvss2_score":3.2, - "cvss3_score":2.1}' - output: 'ok {"action":"INSERT","status":"SUCCESS"}' - stage: "agent vuln_cves insert with spaces in json payload and the test package exist in sys_programs again" - - - input: 'agent 000 sql SELECT distinct status FROM vuln_cves' - output: 'ok [{"status":"PENDING"},{"status":"VALID"}]' - stage: "agent vuln_cves checking status" - - - input: 'agent 000 vuln_cves update_status {"old_status":"*", - "new_status":"OBSOLETE"}' - output: 'ok' - stage: "agent vuln_cves update all status" - - - input: 'agent 000 sql SELECT distinct status FROM vuln_cves' - output: 'ok [{"status":"OBSOLETE"}]' - stage: 'agent vuln_cves checking update all status' - - - input: 'agent 000 vuln_cves remove {"status":"OBSOLETE"}' - output: 'ok 
[{"name":"test_name","version":"1.0","architecture":"x64","cve":"CVE-2021-0001","detection_time":"*","cvss2_score":0,"cvss3_score":0,"reference":"03c06c4f118618400772367b1cf7e73ce0178e02","type":"PACKAGE","status":"OBSOLETE"},{"name":"test_package","version":"1.0","architecture":"x86","cve":"CVE-2021-1001","detection_time":"*","severity":"Medium","cvss2_score":5,"cvss3_score":6.1,"reference":"03c06c4f118618400772367b1cf7e73ce0178e02","type":"PACKAGE","status":"OBSOLETE"},{"name":"test_package2","version":"3.0","architecture":"x86","cve":"CVE-2021-1001","detection_time":"*","severity":"Untriaged","cvss2_score":0,"cvss3_score":0,"reference":"99efe684b5ff4646b3c754de46cb6a9cbee9fbaa","type":"PACKAGE","status":"OBSOLETE"},{"name":"test_package2","version":"3.0","architecture":"x86","cve":"CVE-2021-1002","detection_time":"*","severity":"High","cvss2_score":8.2,"cvss3_score":9.35,"reference":"99efe684b5ff4646b3c754de46cb6a9cbee9fbaa","type":"PACKAGE","status":"OBSOLETE"},{"name":"test package","version":"1.0","architecture":"x86","cve":"CVE-2021-1002","detection_time":"*","severity":"Low","cvss2_score":3.2,"cvss3_score":2.1,"reference":"777fef8cc434b597769d102361af718d29ef72c1","type":"OS","status":"OBSOLETE"}]' - stage: 'agent vuln_cves remove by status to clear vuln_cve table' - use_regex: "yes" - - - input: 'agent 000 sql SELECT * FROM vuln_cves' - output: 'ok []' - stage: "agent vuln_cve checking empty table again" + - + input: 'agent 004 syscheck delete ' + output: err Agent not found + stage: Syscheck query to a non-existing agent - - name: 'Not existing agent' - description: 'Check messages from not registered agents.' 
+ name: Agents OS table sys_osinfo + description: It checks the commands get and set test_case: - - - input: 'agent 004 syscheck delete ' - output: 'err Agent not found' - stage: 'Syscheck query to a non-existing agent' + - + input: agent 000 sql DELETE FROM sys_osinfo + output: ok [] + stage: agent sys_osinfo cleaning sys_osinfo table + - + input: agent 000 osinfo get + output: ok [] + stage: agent sys_osinfo checking table is empty + - + input: agent 000 osinfo set 0|2021/04/08 10:00:00|focal|x86_64|Ubuntu|20.04.2 LTS (Focal Fossa)|focal|20|04|1|ubuntu|Linux|5.4.0-70-generic|#78-Ubuntu SMP Thu Apr 08 10:00:00 UTC 2021|1|2|NULL + output: ok + stage: agent sys_osinfo set information + - + input: agent 000 osinfo get + output: ok [{"scan_id":0,"scan_time":"2021/04/08 10:00:00","hostname":"focal","architecture":"x86_64","os_name":"Ubuntu","os_version":"20.04.2 LTS (Focal Fossa)","os_codename":"focal","os_major":"20","os_minor":"04","os_patch":"2","os_build":"1","os_platform":"ubuntu","sysname":"Linux","release":"5.4.0-70-generic","version":"#78-Ubuntu SMP Thu Apr 08 10:00:00 UTC 2021","os_release":"1","checksum":"legacy","reference":"54d5344c8f49eae38d81651495227c5080755b45"}] + stage: agent sys_osinfo getting information + - + input: agent 000 sql DELETE FROM sys_osinfo + output: ok [] + stage: agent sys_osinfo cleaning sys_osinfo table + - + input: agent 000 osinfo get + output: ok [] + stage: agent sys_osinfo checking table is empty - - name: "Agents' OS table: sys_osinfo" - description: "It checks the commands get, set and set_triaged" + name: "Agents Packages/Hotfixes tables: sys_programs and sys_hotfixes" + description: It checks the commands get packages and get hotfixes test_case: - - - input: 'agent 000 sql DELETE FROM sys_osinfo' - output: "ok []" - stage: "agent sys_osinfo cleaning sys_osinfo table" - - - input: 'agent 000 osinfo get' - output: "ok []" - stage: "agent sys_osinfo checking table is empty" - - - input: 'agent 000 osinfo set 0|2021/04/08 
10:00:00|focal|x86_64|Ubuntu|20.04.2 LTS (Focal Fossa)|focal|20|04|1|ubuntu|Linux|5.4.0-70-generic|#78-Ubuntu SMP Thu Apr 08 10:00:00 UTC 2021|1|2|NULL' - output: 'ok' - stage: "agent sys_osinfo set information" - - - input: 'agent 000 osinfo get' - output: 'ok [{"scan_id":0,"scan_time":"2021/04/08 10:00:00","hostname":"focal","architecture":"x86_64","os_name":"Ubuntu","os_version":"20.04.2 LTS (Focal Fossa)","os_codename":"focal","os_major":"20","os_minor":"04","os_patch":"2","os_build":"1","os_platform":"ubuntu","sysname":"Linux","release":"5.4.0-70-generic","version":"#78-Ubuntu SMP Thu Apr 08 10:00:00 UTC 2021","os_release":"1","checksum":"legacy","triaged":0,"reference":"54d5344c8f49eae38d81651495227c5080755b45"}]' - stage: "agent sys_osinfo getting information" - - - input: 'agent 000 osinfo set_triaged' - output: 'ok' - stage: "agent sys_osinfo set triaged" - - - input: 'agent 000 sql SELECT triaged FROM sys_osinfo WHERE triaged = 1' - output: 'ok [{"triaged":1}]' - stage: "agent sys_osinfo checking triaged" - - - input: 'agent 000 sql DELETE FROM sys_osinfo' - output: "ok []" - stage: "agent sys_osinfo cleaning sys_osinfo table" - - - input: 'agent 000 osinfo get' - output: "ok []" - stage: "agent sys_osinfo checking table is empty" -- - name: "Agents' Packages/Hotfixes tables: sys_programs and sys_hotfixes" - description: "It checks the commands get packages and get hotfixes" - test_case: - - - input: 'agent 003 package save 0|2021/04/07 22:00:00|deb|test_deb_pkg|optional|utils|7490|Wazuh wazuh@wazuh.com|NULL|1.0.0|amd64|NULL|NULL|Test package|NULL|1' - output: 'ok' - stage: "agent sys_programs adding dummy package" - - - input: 'agent 003 package save 0|2021/04/07 22:00:00|rpm|test_rpm_pkg|optional|utils|7490|Wazuh wazuh@wazuh.com|NULL|1.0.0|amd64|NULL|NULL|Test package|NULL|1' - output: 'ok' - stage: "agent sys_programs adding dummy package" - - - input: 'agent 003 sql select count(*) from sys_programs' - output: 'ok [{"count(*)":2}]' - stage: "agent 
sys_programs count packages added" - - - input: 'agent 003 package get' - output: 'ok {"status":"NOT_SYNCED"}' - stage: "agent sys_programs getting not synced packages attempt" - - - input: 'agent 003 hotfix save 0|0|KB2980293|legacy' - output: 'ok' - stage: "agent sys_hotfixes adding dummy hotfix" - - - input: 'agent 003 hotfix save 0|0|KB2980294|legacy' - output: 'ok' - stage: "agent sys_hotfixes adding dummy hotfix" - - - input: 'agent 003 hotfix save 0|0|KB2980295|legacy' - output: 'ok' - stage: "agent sys_hotfixes adding dummy hotfix" - - - input: 'agent 003 sql SELECT count(*) FROM sys_hotfixes' - output: 'ok [{"count(*)":3}]' - stage: "agent sys_hotfixes count hotfixes added" - - - input: 'agent 003 hotfix get' - output: 'ok {"status":"NOT_SYNCED"}' - stage: "agent sys_programs getting not synced packages attempt" - - - input: 'agent 003 sql UPDATE sync_info SET last_attempt = 1, last_completion = 1 where component = "syscollector-hotfixes"' - output: 'ok []' - stage: "agent sync_info set synced" - - - input: 'agent 003 hotfix get' - output: ['due {"hotfix":"KB2980293"}','due {"hotfix":"KB2980294"}','due {"hotfix":"KB2980295"}','ok {"status":"SUCCESS"}'] - stage: "agent sys_hotfixes getting hotfixes" - - - input: 'agent 003 sql UPDATE sync_info SET last_attempt = 1, last_completion = 1 where component = "syscollector-packages"' - output: 'ok []' - stage: "agent sync_info set synced" - - - input: 'agent 003 sql UPDATE sys_programs SET triaged = 1 WHERE name = "test_rpm_pkg"' - output: 'ok []' - stage: "agent sys_programs set package as triaged" - - - input: 'agent 003 package get not_triaged' - output: ['due {"name":"test_deb_pkg","version":"1.0.0","architecture":"amd64","vendor":"Wazuh wazuh@wazuh.com","item_id":"1"}', - 'ok {"status":"SUCCESS"}'] - stage: "agent sys_programs getting not triaged packages" - - - input: 'agent 003 package get' - output: ['due {"name":"test_deb_pkg","version":"1.0.0","architecture":"amd64","vendor":"Wazuh 
wazuh@wazuh.com","item_id":"1"}', - 'due {"name":"test_rpm_pkg","version":"1.0.0","architecture":"amd64","vendor":"Wazuh wazuh@wazuh.com","item_id":"1"}', - 'ok {"status":"SUCCESS"}'] - stage: "agent sys_programs getting not all packages" + - + input: agent 003 package save 0|2021/04/07 22:00:00|deb|test_deb_pkg|optional|utils|7490|Wazuh wazuh@wazuh.com|NULL|1.0.0|amd64|NULL|NULL|Test package|NULL|1 + output: ok + stage: agent sys_programs adding dummy package + - + input: agent 003 package save 0|2021/04/07 22:00:00|rpm|test_rpm_pkg|optional|utils|7490|Wazuh wazuh@wazuh.com|NULL|1.0.0|amd64|NULL|NULL|Test package|NULL|1 + output: ok + stage: agent sys_programs adding dummy package + - + input: agent 003 sql select count(*) from sys_programs + output: ok [{"count(*)":2}] + stage: agent sys_programs count packages added + - + input: agent 003 package get + output: ok {"status":"NOT_SYNCED"} + stage: agent sys_programs getting not synced packages attempt + - + input: agent 003 hotfix save 0|0|KB2980293|legacy + output: ok + stage: agent sys_hotfixes adding dummy hotfix + - + input: agent 003 hotfix save 0|0|KB2980294|legacy + output: ok + stage: agent sys_hotfixes adding dummy hotfix + - + input: agent 003 hotfix save 0|0|KB2980295|legacy + output: ok + stage: agent sys_hotfixes adding dummy hotfix + - + input: agent 003 sql SELECT count(*) FROM sys_hotfixes + output: ok [{"count(*)":3}] + stage: agent sys_hotfixes count hotfixes added + - + input: agent 003 hotfix get + output: ok {"status":"NOT_SYNCED"} + stage: agent sys_programs getting not synced packages attempt + - + input: agent 003 sql UPDATE sync_info SET last_attempt = 1, last_completion = 1 where component = "syscollector-hotfixes" + output: ok [] + stage: agent sync_info set synced + - + input: agent 003 hotfix get + output: ['due {"hotfix":"KB2980293"}','due {"hotfix":"KB2980294"}','due {"hotfix":"KB2980295"}','ok {"status":"SUCCESS"}'] + stage: agent sys_hotfixes getting hotfixes + - + input: agent 003 
sql UPDATE sync_info SET last_attempt = 1, last_completion = 1 where component = "syscollector-packages" + output: ok [] + stage: agent sync_info set synced + - + input: agent 003 package get + output: ['due {"name":"test_deb_pkg","version":"1.0.0","architecture":"amd64","vendor":"Wazuh wazuh@wazuh.com","item_id":"1"}','due {"name":"test_rpm_pkg","version":"1.0.0","architecture":"amd64","vendor":"Wazuh wazuh@wazuh.com","item_id":"1"}','ok {"status":"SUCCESS"}'] + stage: agent sys_programs getting not all packages diff --git a/tests/integration/test_wazuh_db/data/global/wazuh_db_backup_command.yaml b/tests/integration/test_wazuh_db/data/global/wazuh_db_backup_command.yaml index 55a3a5f440..7a49a86888 100644 --- a/tests/integration/test_wazuh_db/data/global/wazuh_db_backup_command.yaml +++ b/tests/integration/test_wazuh_db/data/global/wazuh_db_backup_command.yaml @@ -1,89 +1,88 @@ ---- - - name: "Create One Backup File" + name: Create One Backup File test_case: - - - backups_amount: 1 - command: 'global backup create' + - + backups_amount: 1 + command: global backup create - - name: "Create Three Backups Files" + name: Create Three Backups Files test_case: - - - backups_amount: 3 - command: 'global backup create' + - + backups_amount: 3 + command: global backup create - - name: "Restore backup file saving previous state - save_pre_restore_state == false" + name: Restore backup file saving previous state - save_pre_restore_state == false test_case: - - - backups_amount: 1 - restore: true - save_pre_restore: 'false' - restore_response: ok + - + backups_amount: 1 + restore: true + save_pre_restore: 'false' + restore_response: ok - - name: "Restore backup file saving previous state - save_pre_restore_state == true" + name: Restore backup file saving previous state - save_pre_restore_state == true test_case: - - - backups_amount: 1 - restore: true - save_pre_restore: 'true' - restore_response: ok + - + backups_amount: 1 + restore: true + save_pre_restore: 'true' + 
restore_response: ok - - name: "Restore backup from pre_restore backup - database should not have the test_values" + name: Restore backup from pre_restore backup - database should not have the test_values test_case: - - - backups_amount: 1 - restore: true - save_pre_restore: 'true' - restore_response: ok - restore_pre_restore: true + - + backups_amount: 1 + restore: true + save_pre_restore: 'true' + restore_response: ok + restore_pre_restore: true - - name: "Restore backup with no save_pre_restore_state - pre_restore backup is generated normally" + name: Restore backup with no save_pre_restore_state - pre_restore backup is generated normally test_case: - - - backups_amount: 1 - restore: true - save_pre_restore: 'none' - restore_response: ok + - + backups_amount: 1 + restore: true + save_pre_restore: none + restore_response: ok - - name: "ERROR - Restore backup with empty pre_restore value - save_pre_restore_state =='' " + name: ERROR - Restore backup with empty pre_restore value - save_pre_restore_state =='' test_case: - - - backups_amount: 1 - restore: true - save_pre_restore: '' - restore_response: err Invalid JSON syntax + - + backups_amount: 1 + restore: true + save_pre_restore: '' + restore_response: err Invalid JSON syntax - - name: "ERROR - Restore backup with invalid pre_restore value - save_pre_restore_state == value " + name: ERROR - Restore backup with invalid pre_restore value - save_pre_restore_state == value test_case: - - - backups_amount: 1 - restore: true - save_pre_restore: value - restore_response: err Invalid JSON syntax + - + backups_amount: 1 + restore: true + save_pre_restore: value + restore_response: err Invalid JSON syntax - - name: "ERROR - Restore backup with invalid snapshot value - snapshot == invalid_snapshot_value" + name: ERROR - Restore backup with invalid snapshot value - snapshot == invalid_snapshot_value test_case: - - - backups_amount: 1 - restore: true - snapshot: '"snapshot":"invalid_snapshot_value"' - save_pre_restore: false 
- restore_response: err Invalid JSON syntax + - + backups_amount: 1 + restore: true + snapshot: '"snapshot":"invalid_snapshot_value"' + save_pre_restore: false + restore_response: err Invalid JSON syntax - - name: "ERROR - Restore backup with empty snapshot value - snapshot ==''" + name: ERROR - Restore backup with empty snapshot value - snapshot =='' test_case: - - - backups_amount: 1 - restore: true - snapshot: '"snapshot":""' - save_pre_restore: false - restore_response: err Invalid JSON syntax + - + backups_amount: 1 + restore: true + snapshot: '"snapshot":""' + save_pre_restore: false + restore_response: err Invalid JSON syntax - - name: "ERROR - Restore backup with no snapshot parameter" + name: ERROR - Restore backup with no snapshot parameter test_case: - - - backups_amount: 1 - restore: true - snapshot: - save_pre_restore: false - restore_response: err Invalid JSON syntax \ No newline at end of file + - + backups_amount: 1 + restore: true + snapshot: None + save_pre_restore: false + restore_response: err Invalid JSON syntax diff --git a/tests/integration/test_wazuh_db/data/wazuh_db_backups_conf.yaml b/tests/integration/test_wazuh_db/data/wazuh_db_backups_conf.yaml index c68393236e..25fbc61ee7 100644 --- a/tests/integration/test_wazuh_db/data/wazuh_db_backups_conf.yaml +++ b/tests/integration/test_wazuh_db/data/wazuh_db_backups_conf.yaml @@ -1,31 +1,33 @@ - tags: + - wazuh_db + - wdb_socket apply_to_modules: - - test_db_backup - - test_wdb_backup_configs + - test_db_backup + - test_wdb_backup_configs sections: - - section: wdb - elements: - - backup: - attributes: - - database: 'global' - elements: + - section: wdb + elements: + - backup: + attributes: + - database: global + elements: + - enabled: + value: ENABLED + - interval: + value: INTERVAL + - max_files: + value: MAX_FILES + - section: sca + elements: - enabled: - value: ENABLED - - interval: - value: INTERVAL - - max_files: - value: MAX_FILES - - section: sca - elements: - - enabled: - value: 'no' - 
- section: rootcheck - elements: - - disabled: - value: 'yes' - - section: wodle - attributes: - - name: 'syscollector' - elements: - - disabled: - value: 'yes' + value: 'no' + - section: rootcheck + elements: + - disabled: + value: 'yes' + - section: wodle + attributes: + - name: syscollector + elements: + - disabled: + value: 'yes' diff --git a/tests/integration/test_wazuh_db/test_agent_database_version.py b/tests/integration/test_wazuh_db/test_agent_database_version.py index e3f2b0689e..54b989d212 100644 --- a/tests/integration/test_wazuh_db/test_agent_database_version.py +++ b/tests/integration/test_wazuh_db/test_agent_database_version.py @@ -9,7 +9,7 @@ pytestmark = [TIER0, LINUX, SERVER] # Variables -expected_database_version = '12' +expected_database_version = '13' # Fixtures @@ -45,7 +45,7 @@ def test_agent_database_version(restart_wazuh_daemon, remove_agents): - Verify that database version is the expected one. expected_output: - - Database version: 12 + - Database version: 13 tags: - wazuh_db diff --git a/tests/integration/test_wazuh_db/test_wazuh_db.py b/tests/integration/test_wazuh_db/test_wazuh_db.py index 5e2d799465..9ca285602f 100644 --- a/tests/integration/test_wazuh_db/test_wazuh_db.py +++ b/tests/integration/test_wazuh_db/test_wazuh_db.py @@ -300,10 +300,10 @@ def pre_insert_packages(): for pkg_n in range(PACKAGES_NUMBER): command = f"agent 000 sql INSERT OR REPLACE INTO sys_programs \ (scan_id,scan_time,format,name,priority,section,size,vendor,install_time,version,\ - architecture,multiarch,source,description,location,triaged,cpe,msu_name,checksum,item_id)\ + architecture,multiarch,source,description,location,cpe,msu_name,checksum,item_id)\ VALUES(0,'2021/04/07 22:00:00','deb','test_package_{pkg_n}','optional','utils',{random.randint(200,1000)},\ 'Wazuh wazuh@wazuh.com',NULL,'{random.randint(1,10)}.0.0','all',NULL,NULL,'Test package {pkg_n}',\ - NULL,0,NULL,NULL,'{random.getrandbits(128)}','{random.getrandbits(128)}')" + 
NULL,NULL,NULL,'{random.getrandbits(128)}','{random.getrandbits(128)}')" receiver_sockets[0].send(command, size=True) response = receiver_sockets[0].receive(size=True).decode() data = response.split() @@ -379,7 +379,7 @@ def test_wazuh_db_messages_agent(restart_wazuh, clean_registered_agents, configu match = True if regex_match(expected_output, response) else False else: match = validate_wazuh_db_response(expected_output, response) - assert match, 'Failed test case stage {}: {}. Expected: {}. Response: {}' \ + assert match, 'Failed test case stage {}: {}. Expected: "{}". Response: "{}".' \ .format(index + 1, stage['stage'], expected_output, response) From 089b1179f9584989bebccab1feb8289ccdf85f99 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 18 Jan 2024 18:38:51 +0000 Subject: [PATCH 098/174] docs: include 4868 changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 23f344d4fc..42a0b29546 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ All notable changes to this project will be documented in this file. ### Added +- Agent Simulator: Syscollector message generation refactoring ([#4868](https://github.com/wazuh/wazuh-qa/pull/4868)) \- (Framework) - Migrate Wazuh Ansibles Roles. ([#4642](https://github.com/wazuh/wazuh-qa/pull/4642)) \- (Framework) - Add scans environment setup documentation. 
([#4444](https://github.com/wazuh/wazuh-qa/pull/4444)) \- (Tests) - Add system test for global group hash ([#4015](https://github.com/wazuh/wazuh-qa/pull/4015)) \- (Tests) From 2544fbb1f4e71f7296447995f7cfd5a651c6142e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 18 Jan 2024 17:14:26 +0000 Subject: [PATCH 099/174] feat: include parameter to allow custom packages to simulate agent --- .../data/syscollector_parsed_packages.json | 761 ++++++++++++++++++ .../wazuh_testing/scripts/simulate_agents.py | 8 +- 2 files changed, 768 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json b/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json index edb0840c08..f03fc7204a 100644 --- a/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json +++ b/deps/wazuh_testing/wazuh_testing/data/syscollector_parsed_packages.json @@ -113,5 +113,766 @@ "vendor": "openbsd", "product": "openbsd", "version": "2.4" + }, + { + "vendor": "bsdi", + "product": "bsd_os", + "version": "1.1" + }, + { + "vendor": "caldera", + "product": "openlinux", + "version": "1.2" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "2.0" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "2.1" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "3.0.3" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "4.0" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "4.1" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "4.2" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "5.0" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "5.1" + }, + { + "vendor": "tritreal", + "product": "ted_cde", + "version": "4.3" + }, + { + "vendor": "hp", + "product": "hp-ux", + "version": "10.01" + }, + { + "vendor": "hp", + "product": "hp-ux", + "version": "10.02" + }, + { + "vendor": 
"hp", + "product": "hp-ux", + "version": "10.03" + }, + { + "vendor": "hp", + "product": "hp-ux", + "version": "11.00" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1.1" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1.2" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1.3" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1.4" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1.5" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.2" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.2.1" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.3" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "5.2" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "5.3" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "6.0" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "6.1" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "6.2" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "6.3" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "6.4" + }, + { + "vendor": "sun", + "product": "solaris", + "version": "2.6" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "0" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "4.1.3" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.0" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.1" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.2" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.3" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.4" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.5" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.5.1" + }, + { + "vendor": "hp", + "product": "dtmail", + "version": "0" + }, + { + "vendor": 
"university_of_washington", + "product": "pine", + "version": "4.02" + }, + { + "vendor": "sco", + "product": "unixware", + "version": "7.0" + }, + { + "vendor": "netscape", + "product": "messaging_server", + "version": "3.55" + }, + { + "vendor": "university_of_washington", + "product": "imap", + "version": "10.234" + }, + { + "vendor": "qualcomm", + "product": "qpopper", + "version": "2.4" + }, + { + "vendor": "c2net", + "product": "stonghold_web_server", + "version": "2.0.1" + }, + { + "vendor": "c2net", + "product": "stonghold_web_server", + "version": "2.2" + }, + { + "vendor": "c2net", + "product": "stonghold_web_server", + "version": "2.3" + }, + { + "vendor": "hp", + "product": "open_market_secure_webserver", + "version": "2.1" + }, + { + "vendor": "microsoft", + "product": "exchange_server", + "version": "5.5" + }, + { + "vendor": "microsoft", + "product": "internet_information_server", + "version": "3.0" + }, + { + "vendor": "microsoft", + "product": "internet_information_server", + "version": "4.0" + }, + { + "vendor": "microsoft", + "product": "site_server", + "version": "3.0" + }, + { + "vendor": "netscape", + "product": "certificate_server patch1", + "version": "1.0" + }, + { + "vendor": "netscape", + "product": "collabra_server", + "version": "3.5.2" + }, + { + "vendor": "netscape", + "product": "directory_server patch5", + "version": "1.3" + }, + { + "vendor": "netscape", + "product": "directory_server", + "version": "3.12" + }, + { + "vendor": "netscape", + "product": "directory_server patch1", + "version": "3.1" + }, + { + "vendor": "netscape", + "product": "enterprise_server", + "version": "2.0" + }, + { + "vendor": "netscape", + "product": "enterprise_server", + "version": "3.0.1b" + }, + { + "vendor": "netscape", + "product": "enterprise_server", + "version": "3.5.1" + }, + { + "vendor": "netscape", + "product": "fasttrack_server", + "version": "3.01b" + }, + { + "vendor": "netscape", + "product": "messaging_server", + "version": "3.54" + }, + 
{ + "vendor": "netscape", + "product": "proxy_server", + "version": "3.5.1" + }, + { + "vendor": "ssleay", + "product": "ssleay", + "version": "0.6.6" + }, + { + "vendor": "ssleay", + "product": "ssleay", + "version": "0.8.1" + }, + { + "vendor": "ssleay", + "product": "ssleay", + "version": "0.9" + }, + { + "vendor": "hp", + "product": "hp-ux", + "version": "10.34" + }, + { + "vendor": "hp", + "product": "hp-ux", + "version": "11.00" + }, + { + "vendor": "sun", + "product": "solaris", + "version": "2.6" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.3" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.4" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.5" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.5.1" + }, + { + "vendor": "data_general", + "product": "dg_ux", + "version": "5.4_3.0" + }, + { + "vendor": "data_general", + "product": "dg_ux", + "version": "5.4_3.1" + }, + { + "vendor": "data_general", + "product": "dg_ux", + "version": "5.4_4.1" + }, + { + "vendor": "data_general", + "product": "dg_ux", + "version": "5.4_4.11" + }, + { + "vendor": "isc", + "product": "bind", + "version": "4.9.6" + }, + { + "vendor": "isc", + "product": "bind", + "version": "8.1" + }, + { + "vendor": "isc", + "product": "bind", + "version": "8.1.1" + }, + { + "vendor": "bsdi", + "product": "bsd_os", + "version": "2.0" + }, + { + "vendor": "bsdi", + "product": "bsd_os", + "version": "2.0.1" + }, + { + "vendor": "bsdi", + "product": "bsd_os", + "version": "2.1" + }, + { + "vendor": "caldera", + "product": "openlinux", + "version": "1.0" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1.1" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1.2" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1.3" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.1.4" + }, + { + "vendor": "ibm", + 
"product": "aix", + "version": "4.1.5" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.2" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.2.1" + }, + { + "vendor": "ibm", + "product": "aix", + "version": "4.3" + }, + { + "vendor": "nec", + "product": "asl_ux_4800", + "version": "64" + }, + { + "vendor": "netbsd", + "product": "netbsd", + "version": "1.0" + }, + { + "vendor": "netbsd", + "product": "netbsd", + "version": "1.1" + }, + { + "vendor": "netbsd", + "product": "netbsd", + "version": "1.2" + }, + { + "vendor": "netbsd", + "product": "netbsd", + "version": "1.2.1" + }, + { + "vendor": "netbsd", + "product": "netbsd", + "version": "1.3" + }, + { + "vendor": "netbsd", + "product": "netbsd", + "version": "1.3.1" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "4.0" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "4.1" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "4.2" + }, + { + "vendor": "redhat", + "product": "linux", + "version": "5.0" + }, + { + "vendor": "sco", + "product": "open_desktop", + "version": "3.0" + }, + { + "vendor": "sco", + "product": "open_desktop", + "version": "5.0" + }, + { + "vendor": "sco", + "product": "unixware", + "version": "2.1" + }, + { + "vendor": "sco", + "product": "unixware", + "version": "7.0" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "3.2" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "3.3" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "3.3.1" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "3.3.2" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "3.3.3" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.1" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.1t" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.2" + }, + { + "vendor": "sgi", + 
"product": "irix", + "version": "4.0.3" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.4" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.4b" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.4t" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.5" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.5_iop" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.5_ipr" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.5a" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.5d" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.5e" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.5f" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.5g" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "4.0.5h" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "5.0" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "5.0.1" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "5.1" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "5.1.1" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "5.2" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "5.3" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "6.0" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "6.1" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "6.2" + }, + { + "vendor": "sgi", + "product": "irix", + "version": "6.3" + }, + { + "vendor": "sun", + "product": "solaris ppc", + "version": "2.5.1" + }, + { + "vendor": "sun", + "product": "solaris x86", + "version": "2.5" + }, + { + "vendor": "sun", + "product": "solaris x86", + "version": "2.5.1" + }, + { + "vendor": "sun", + "product": "solaris", + "version": "2.6" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "0" + }, + { + "vendor": "sun", + "product": "sunos", + 
"version": "5.3" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.4" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.5" + }, + { + "vendor": "sun", + "product": "sunos", + "version": "5.5.1" } ] + diff --git a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py index 74082371c2..3e5a999c85 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/simulate_agents.py @@ -107,7 +107,8 @@ def create_agent(args, custom_labels): 'custom_logcollector_message': args.custom_logcollector_message, 'syscollector_frequency': args.syscollector_frequency, 'syscollector_event_types': args.syscollector_event_types, - 'syscollector_legacy_messages': args.syscollector_legacy_messages + 'syscollector_legacy_messages': args.syscollector_legacy_messages, + 'syscollector_packages_vuln_content': args.syscollector_packages_list_file } agent = ag.Agent(**agent_args) @@ -398,6 +399,11 @@ def main(): default=False, dest='syscollector_legacy_messages') + arg_parser.add_argument('--syscollector-packages-list-file', metavar='', + type=str, help='''File containing a list of packages to be sent by syscollector. + One package per line. Default is None.''', required=False, default=None, + dest='syscollector_packages_list_file') + args = arg_parser.parse_args() process_script_parameters(args) From 6de68b44d4b0b26904ac224263b550458ef3f28a Mon Sep 17 00:00:00 2001 From: Jotacarma90 Date: Thu, 18 Jan 2024 15:42:07 +0100 Subject: [PATCH 100/174] fix: added a sleep in the mtime case for test_file_checker. --- CHANGELOG.md | 1 + .../test_file_checks/test_file_checkers.py | 15 +++++++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 23f344d4fc..94f1e4e5a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -39,6 +39,7 @@ All notable changes to this project will be documented in this file. 
- Fix macOS agents provision to enable registration and connection with managers. ([#4770](https://github.com/wazuh/wazuh-qa/pull/4770/)) \- (Framework) - Fix hardcoded python interpreter in qa_framework role. ([#4658](https://github.com/wazuh/wazuh-qa/pull/4658)) \- (Framework) - Fix duplicated jq dependency ([#4678](https://github.com/wazuh/wazuh-qa/pull/4678)) \- (Framework) +- Fix test_file_checker in check_mtime case ([#4873](https://github.com/wazuh/wazuh-qa/pull/4873)) \- (Tests) ## [4.7.2] - TBD diff --git a/tests/integration/test_fim/test_files/test_file_checks/test_file_checkers.py b/tests/integration/test_fim/test_files/test_file_checks/test_file_checkers.py index 2dcec9c65b..cf19f343cf 100644 --- a/tests/integration/test_fim/test_files/test_file_checks/test_file_checkers.py +++ b/tests/integration/test_fim/test_files/test_file_checks/test_file_checkers.py @@ -71,6 +71,7 @@ from wazuh_testing.tools.monitoring import FileMonitor from wazuh_testing.modules.fim.utils import regular_file_cud from wazuh_testing.tools import PREFIX +from time import sleep # Marks @@ -207,6 +208,16 @@ def test_checkers(file_path, file_attrs, tags_to_apply, triggers_modification, c ''' check_apply_test(tags_to_apply, get_configuration['tags']) + def waitasecond(event): + sleep(1) + + # In the case of CHECK_MTIME only, we need to wait one second after file creation for the timestamp to be different + # (otherwise FIM will not generate alert). + if file_attrs == {CHECK_MTIME}: + regular_file_cud(file_path, wazuh_log_monitor, min_timeout=global_parameters.default_timeout, + validators_after_create=[waitasecond], + options=file_attrs, triggers_modified_event=triggers_modification, escaped=True) # Test files checks. 
- regular_file_cud(file_path, wazuh_log_monitor, min_timeout=global_parameters.default_timeout, - options=file_attrs, triggers_modified_event=triggers_modification, escaped=True) + else: + regular_file_cud(file_path, wazuh_log_monitor, min_timeout=global_parameters.default_timeout, + options=file_attrs, triggers_modified_event=triggers_modification, escaped=True) From 940fb60fdb5d5a0421cdc5fc3f8276f85a97e6f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 23 Jan 2024 09:00:21 +0000 Subject: [PATCH 101/174] refac: remote operations in install/remote/update package functions --- .../end_to_end/remote_operations_handler.py | 359 +++++++++++++++--- .../end_to_end/vulnerability_detector.py | 145 +++---- .../vuln_packages.json | 108 ++++++ .../wazuh_testing/tools/system.py | 7 + .../cases/test_vulnerability.yaml | 147 ++++--- .../test_vulnerability_detector/conftest.py | 33 +- .../test_vulnerability_detector.py | 46 ++- 7 files changed, 633 insertions(+), 212 deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 50ab7353fa..31e474e4df 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -20,15 +20,15 @@ This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ -import re +import os +import json +import logging from typing import Dict, List from multiprocessing.pool import ThreadPool -from datetime import datetime, timezone -import logging +from datetime import datetime +from concurrent.futures import ThreadPoolExecutor -from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.system import HostManager 
-from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost from wazuh_testing.end_to_end.waiters import wait_until_vuln_scan_agents_finished from wazuh_testing.end_to_end.regex import get_event_regex @@ -36,10 +36,22 @@ from wazuh_testing.end_to_end.vulnerability_detector import check_vuln_alert_indexer, check_vuln_state_index -def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_manager: HostManager, - current_datetime: str = None): +def load_packages_metadata(): + """ + Load packages metadata from the packages.json file. """ - Launch a remote operation on the specified host. + packages_filepath = os.path.join(os.path.dirname(__file__), + 'vulnerability_detector_packages', 'vuln_packages.json') + + with open(packages_filepath, 'r') as packages_file: + packages_data = json.load(packages_file) + + return packages_data + + +def install_package(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): + """ + Install a package on the specified host. Args: host (str): The target host on which to perform the operation. @@ -49,33 +61,127 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man Raises: ValueError: If the specified operation is not recognized. 
""" - logging.critical(f"Launching remote operation: {operation_data}") - + logging.critical(f"Installing package on {host}") host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(host)['architecture'] system = host_manager.get_host_variables(host)['os_name'] - operation = operation_data['operation'] - if system == 'linux': system = host_manager.get_host_variables(host)['os'].split('_')[0] - if operation == 'install_package': - logging.critical(f"Installing package on {host}") + install_package_data = operation_data['package'] + package_id = None - package_data = operation_data['package'] - package_url = package_data[host_os_name][host_os_arch] - - if isinstance(package_url, list): - for package in package_url: - host_manager.install_package(host, package, system) + if host_os_name in install_package_data: + if host_os_arch in install_package_data[host_os_name]: + package_id = install_package_data[host_os_name][host_os_arch] else: - host_manager.install_package(host, package_url, system) + raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") - logging.critical(f"Package installed on {host}") - logging.critical(f"Waiting for syscollector scan to finish on {host}") + package_data = load_packages_metadata()[package_id] + package_url = package_data['urls'][host_os_name][host_os_arch] + logging.critical(f"Installing package on {host}") + logging.critical(f"Package URL: {package_url}") + + current_datetime = datetime.utcnow().isoformat() + host_manager.install_package(host, package_url, system) + + logging.critical(f"Package installed on {host}") + + if operation_data['check']['alerts'] or operation_data['check']['state_index']: + logging.critical(f"Waiting for syscollector scan to finish on {host}") TIMEOUT_SYSCOLLECTOR_SCAN = 80 + truncate_remote_host_group_files(host_manager, 'agent', 'logs') + + # Wait until syscollector + monitoring_data = generate_monitoring_logs(host_manager, 
+ [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], + host_manager.get_group_hosts('agent')) + + result = monitoring_events_multihost(host_manager, monitoring_data) + + logging.critical(f"Syscollector scan finished with result: {result}") + + truncate_remote_host_group_files(host_manager, 'manager', 'logs') + + logging.critical(f"Waiting for vulnerability scan to finish on {host}") + + wait_until_vuln_scan_agents_finished(host_manager) + + logging.critical(f"Checking agent vulnerability on {host}") + + results = { + 'evidences': { + "alerts_not_found": [], + "states_not_found": [] + }, + 'checks': {} + } + + if 'check' in operation_data: + if operation_data['check']['alerts']: + logging.critical(f'Checking vulnerability alerts in the indexer for {host}') + results["alerts_not_found"] = check_vuln_alert_indexer(host_manager, host, package_data, + current_datetime) + + if operation_data['check']['state_index']: + logging.critical(f'Checking vulnerability state index for {host}') + results["states_not_found"] = check_vuln_state_index(host_manager, host, package_data, + current_datetime) + + logging.critical(f"Results: {results}") + + if results['alerts_not_found'] or results['states_not_found']: + results['checks']['all_successfull'] = False + else: + results['checks']['all_successfull'] = True + + return { + f"{host}": results + } + + +def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): + """ + Install a package on the specified host. + + Args: + host (str): The target host on which to perform the operation. + operation_data (dict): Dictionary containing operation details. + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + + Raises: + ValueError: If the specified operation is not recognized. 
+ """ + logging.critical(f"Removing package on {host}") + host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(host)['architecture'] + system = host_manager.get_host_variables(host)['os_name'] + if system == 'linux': + system = host_manager.get_host_variables(host)['os'].split('_')[0] + + package_data = operation_data['package'] + package_id = None + + if host_os_name in package_data: + if host_os_arch in package_data[host_os_name]: + package_id = package_data[host_os_name][host_os_arch] + else: + raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + + package_data = load_packages_metadata()[package_id] + logging.critical(f"Removing package on {host}") + uninstall_name = package_data['uninstall_name'] + + current_datetime = datetime.utcnow().isoformat() + host_manager.remove_package(host, uninstall_name, system) + + if operation_data['check']['alerts'] or operation_data['check']['state_index']: + logging.critical(f"Waiting for syscollector scan to finish on {host}") + TIMEOUT_SYSCOLLECTOR_SCAN = 80 truncate_remote_host_group_files(host_manager, 'agent', 'logs') # Wait until syscollector @@ -93,49 +199,167 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man logging.critical(f"Waiting for vulnerability scan to finish on {host}") - # Wait until VD scan wait_until_vuln_scan_agents_finished(host_manager) - elif operation == 'remove_package': - logging.critical(f"Removing package on {host}") - package_data = operation_data['package'] - package_name = package_data[host_os_name][host_os_arch] - host_manager.remove_package(host, package_name, system) + logging.critical(f"Checking agent vulnerability on {host}") + + results = { + 'evidences': { + "alerts_not_found": [], + "states_found": [] + }, + 'checks': {} + } + + logging.critical("Operation data is: {}".format(package_data)) + + if 'check' in operation_data: + if operation_data['check']['alerts'] 
or operation_data['check']['states']: + if operation_data['check']['alerts']: + logging.critical(f'Checking vulnerability alerts in the indexer for {host}') + results["evidences"]["alerts_not_found"] = check_vuln_alert_indexer(host_manager, host, package_data, + current_datetime, + vuln_mitigated=True) + + if operation_data['check']['state_index']: + logging.critical(f'Checking vulnerability state index for {host}') + states_not_found = check_vuln_state_index(host_manager, host, package_data, + current_datetime, return_found=True) + + results['evidences']["states_found"] = states_not_found + + if results['evidences']['alerts_not_found'] or len(results['evidences']['states_found']) > 0: + results['checks']['all_successfull'] = False + else: + results['checks']['all_successfull'] = True + + return { + f"{host}": results + } + + +def update_package(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): + """ + Install a package on the specified host. + + Args: + host (str): The target host on which to perform the operation. + operation_data (dict): Dictionary containing operation details. + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + + Raises: + ValueError: If the specified operation is not recognized. 
+ """ + logging.critical(f"Updating package on {host}") + + host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(host)['architecture'] + system = host_manager.get_host_variables(host)['os_name'] + if system == 'linux': + system = host_manager.get_host_variables(host)['os'].split('_')[0] + + install_package_data_from = operation_data['package']['from'] + install_package_data_to= operation_data['package']['to'] - TIMEOUT_SYSCOLLECTOR_SCAN = 60 + package_id_from = None + package_id_to = None + if host_os_name in install_package_data_from: + if host_os_arch in install_package_data_from[host_os_name]: + package_id_from = install_package_data_from[host_os_name][host_os_arch] + else: + raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + + if host_os_name in install_package_data_to: + if host_os_arch in install_package_data_to[host_os_name]: + package_id_to = install_package_data_to[host_os_name][host_os_arch] + else: + raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + + package_data_from = load_packages_metadata()[package_id_from] + package_data_to = load_packages_metadata()[package_id_to] + + package_url_from = package_data_from['urls'][host_os_name][host_os_arch] + package_url_to = package_data_to['urls'][host_os_name][host_os_arch] + + logging.critical(f"Installing package on {host}") + logging.critical(f"Package URL: {package_url_to}") + + current_datetime = datetime.utcnow().isoformat() + host_manager.install_package(host, package_url_to, system) + + logging.critical(f"Package installed on {host}") + + if operation_data['check']['alerts'] or operation_data['check']['state_index']: + logging.critical(f"Waiting for syscollector scan to finish on {host}") + TIMEOUT_SYSCOLLECTOR_SCAN = 80 truncate_remote_host_group_files(host_manager, 'agent', 'logs') + # Wait until syscollector monitoring_data = generate_monitoring_logs(host_manager, - 
[get_event_regex({'event': 'syscollector_scan_start'}), - get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], host_manager.get_group_hosts('agent')) + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], + host_manager.get_group_hosts('agent')) - monitoring_events_multihost(host_manager, monitoring_data) + result = monitoring_events_multihost(host_manager, monitoring_data) + + logging.critical(f"Syscollector scan finished with result: {result}") truncate_remote_host_group_files(host_manager, 'manager', 'logs') - # Wait until VD scan + logging.critical(f"Waiting for vulnerability scan to finish on {host}") + wait_until_vuln_scan_agents_finished(host_manager) - elif operation == 'check_agent_vulnerability': logging.critical(f"Checking agent vulnerability on {host}") results = { - "alerts_not_found": [], - "states_not_found": [] + 'evidences': { + "alerts_not_found_from": [], + "states_found_from": [], + "alerts_not_found_to": [], + "states_not_found_to": [], + }, + 'checks': {} } - if operation_data['parameters']['alert_indexed']: - logging.critical(f'Checking vulnerability alerts in the indexer for {host}') - results["alerts_not_found"] = check_vuln_alert_indexer(host_manager, operation_data['vulnerability_data'], current_datetime) + if 'check' in operation_data: + if operation_data['check']['alerts']: + logging.critical(f'Checking vulnerability alerts in the indexer for {host}. 
Expected CVE mitigation') + results["evidences"]["alerts_not_found_from"] = check_vuln_alert_indexer(host_manager, host, package_data_from, + current_datetime, + vuln_mitigated=True) + + if operation_data['check']['state_index']: + logging.critical(f'Checking vulnerability state index for {host}') + states_not_found = check_vuln_state_index(host_manager, host, package_data_from, + current_datetime, return_found=True) + results['evidences']["states_found_from"] = states_not_found + + logging.critical(f'Checking vulnerability alerts in the indexer for {host}. Expected CVE vuln of new package version') + + if operation_data['check']['alerts']: + logging.critical(f'Checking vulnerability alerts in the indexer for {host}') + results["alerts_not_found_to"] = check_vuln_alert_indexer(host_manager, host, package_data_to, + current_datetime) + + if operation_data['check']['state_index']: + logging.critical(f'Checking vulnerability state index for {host}') + results["states_not_found_to"] = check_vuln_state_index(host_manager, host, package_data_to, + current_datetime) - if operation_data['parameters']['state_indice']: - logging.critical(f'Checking vulnerability state index for {host}') - results["states_not_found"] = check_vuln_state_index(host_manager, operation_data['vulnerability_data'], current_datetime) + logging.critical(f"Results: {results}") - assert len(results["alerts_not_found"]) == 0 and len(results["states_not_found"]) == 0, \ - f"Vulnerability alerts or states not found for {host}: {results}" + if results['evidences']['alerts_not_found_from'] or len(results['evidences']['states_found_from']) > 0 or \ + results['evidences']['alerts_not_found_to'] or results['evidences']['states_not_found_to']: + results['checks']['all_successfull'] = False + else: + results['checks']['all_successfull'] = True + + return { + f"{host}": results + } def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager): @@ -152,10 +376,21 @@ 
def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict if task_list: for task in task_list: - launch_remote_operation(agent, task, host_manager, timestamp) + operation = task['operation'] + if operation in locals(): + locals()[operation](agent, task, host_manager, timestamp) + + +def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): + operation = operation_data['operation'] + if operation in globals(): + operation_result = globals()[operation](host, operation_data, host_manager) + return operation_result + else: + raise ValueError(f"Operation {operation} not recognized") -def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager): +def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager, target_to_ignore: []): """ Launch parallel remote operations on multiple hosts. @@ -163,14 +398,24 @@ def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager) task_list (list): List of dictionaries containing operation details. host_manager (HostManager): An instance of the HostManager class containing information about hosts. 
""" - for task in task_list: - logging.critical(f"Launching parallel task: {task}") - parallel_configuration = [] - target = task['target'] + results = {} + + if target_to_ignore: + for target in results: + results[target]['checks']['all_successfull'] = False + + def launch_and_store_result(args): + host, task, manager = args + result = launch_remote_operation(host, task, manager) + results.update(result) + + with ThreadPoolExecutor() as executor: + # Submit tasks asynchronously + futures = [executor.submit(launch_and_store_result, (host, task, host_manager)) + for task in task_list for host in host_manager.get_group_hosts(task['target'] - target_to_ignore)] - for host in host_manager.get_group_hosts(target): - parallel_configuration.append((host, task, host_manager)) + # Wait for all tasks to complete + for future in futures: + future.result() - with ThreadPool() as pool: - # Use the pool to map the function to the list of hosts - pool.starmap(launch_remote_operation, parallel_configuration) + return results diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py index 4d428100bf..a124aee7ab 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py @@ -6,7 +6,8 @@ import re -def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None): +def check_vuln_state_index(host_manager: HostManager, host: str, package: Dict[str, Dict], + current_datetime: str = "", return_found: bool = False): """ Check vulnerability state index for a host. 
@@ -20,49 +21,35 @@ def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[s index_vuln_state_content = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', greater_than_timestamp=current_datetime)['hits']['hits'] expected_alerts_not_found = [] + expected_alerts_found = [] - logging.critical(f"Checking vulnerability state index {vulnerability_data}") + logging.critical(f"Checking vulnerability state index {package}") + vulnerabilities = package['CVE'] - for agent in host_manager.get_group_hosts('agent'): - logging.critical(f"Checking vulnerability state index for {agent}") + for vulnerability in vulnerabilities: + found = False + for indice_vuln in index_vuln_state_content: + state_agent = indice_vuln['_source']['agent']['name'] + state_cve = indice_vuln["_source"]['vulnerability']['id'] + state_package_name = indice_vuln['_source']['package']['name'] + state_package_version = indice_vuln['_source']['package']['version'] - host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - host_os_arch = host_manager.get_host_variables(agent)['architecture'] + if state_agent == host and state_cve == vulnerability \ + and state_package_name == package['package_name'] and \ + state_package_version == package['package_version']: + found = True + expected_alerts_found.append(vulnerability) - logging.critical(f"Host OS name: {host_os_name}") - logging.critical(f"Host OS arch: {host_os_arch}") - - if host_os_name in vulnerability_data: - if host_os_arch in vulnerability_data[host_os_name]: - logging.critical(f"Inside Host OS arch: {host_os_arch}") - - vulnerabilities = vulnerability_data[host_os_name][host_os_arch] - - logging.critical(f"Vulnerabilities: {vulnerabilities}") - - for vulnerability in vulnerabilities: - found = False - for indice_vuln in index_vuln_state_content: - logging.critical(f"Indice vuln: {indice_vuln}") - - state_agent = indice_vuln['_source']['agent']['name'] - state_cve = 
indice_vuln["_source"]['vulnerability']['id'] - state_package_name = indice_vuln['_source']['package']['name'] - state_package_version = indice_vuln['_source']['package']['version'] - - if state_agent == agent and state_cve == vulnerability['CVE'] \ - and state_package_name == vulnerability['PACKAGE_NAME'] and \ - state_package_version == vulnerability['PACKAGE_VERSION']: - found = True - - if not found: - expected_alerts_not_found.append(vulnerability) + if not found: + expected_alerts_not_found.append(vulnerability) logging.critical(f"Expected alerts not found: {expected_alerts_not_found}") logging.critical(f"Triggered alerts: {index_vuln_state_content}") - return expected_alerts_not_found - + if return_found: + return expected_alerts_found + else: + return expected_alerts_not_found def get_alerts_by_agent(alerts, regex): @@ -84,9 +71,9 @@ def get_alerts_by_agent(alerts, regex): agent = alert['_source']['agent']['name'] if agent not in alerts_vuln_by_agent: alerts_vuln_by_agent[agent] = [] - else: - alerts_vuln_by_agent[agent].append(alert) + alerts_vuln_by_agent[agent].append(alert) + logging.critical(f"Alerts by agent: {alerts_vuln_by_agent}") return alerts_vuln_by_agent @@ -103,8 +90,8 @@ def get_indexed_vulnerabilities_by_agent(indexed_vulnerabilities): return vulnerabilities_by_agent -def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict], - current_datetime: str = ''): +def check_vuln_alert_indexer(host_manager: HostManager, host: str, package: Dict[str, Dict], + current_datetime: str = '', vuln_mitigated: bool = False): """ Check vulnerability alerts in the indexer for a host. @@ -115,57 +102,47 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict Returns: list: List of vulnerability alerts. 
""" - regex_cve_affects = "CVE.* affects .*" - regex_solved_vuln = "The .* that affected .* was solved due to a package removal" + logging.critical(f"Checking vulnerability alerts in the indexer {package}") - logging.critical(f"Checking vulnerability alerts in the indexer {vulnerability_data}") + regex_to_match = "CVE.* affects .*" + if vuln_mitigated: + regex_to_match = "The .* that affected .* was solved due to a package removal" indexer_alerts = get_indexer_values(host_manager, greater_than_timestamp=current_datetime)['hits']['hits'] - # Get CVE affects alerts for all agents - detected_vuln_alerts_by_agent = get_alerts_by_agent(indexer_alerts, regex_cve_affects) - solved_alerts_by_agent = get_alerts_by_agent(indexer_alerts, regex_solved_vuln) + alerts_global = get_alerts_by_agent(indexer_alerts, regex_to_match) + + if host in alerts_global: + triggered_alerts = alerts_global[host] + else: + triggered_alerts = [] + + logging.critical(f"Triggered alerts: {triggered_alerts}") - triggered_alerts = detected_vuln_alerts_by_agent expected_alerts_not_found = [] - if 'state' in vulnerability_data and not vulnerability_data['state']: - triggered_alerts = solved_alerts_by_agent - - for agent in host_manager.get_group_hosts('agent'): - logging.critical(f"Checking vulnerability alerts for {agent}") - host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - host_os_arch = host_manager.get_host_variables(agent)['architecture'] - logging.critical(f"Host OS name: {host_os_name}") - logging.critical(f"Host OS arch: {host_os_arch}") - logging.critical(f"Check1: {host_os_arch in vulnerability_data}") - logging.critical(f"Check2: {host_os_name in vulnerability_data}") - - if host_os_name in vulnerability_data: - if host_os_arch in vulnerability_data[host_os_name]: - logging.critical(f"Inside Host OS arch: {host_os_arch}") - vulnerabilities = vulnerability_data[host_os_name][host_os_arch] - for vulnerability in vulnerabilities: - - logging.critical(f"Checking 
vulnerability: {vulnerability}") - - cve = vulnerability['CVE'] - package = vulnerability['PACKAGE_NAME'] - version = vulnerability['PACKAGE_VERSION'] - found = False - for triggered_alert in triggered_alerts[agent]: - alert_package_name = triggered_alert['_source']['data']['vulnerability']['package']["name"] - alert_package_version = \ - triggered_alert['_source']['data']['vulnerability']['package']['version'] - alert_cve = triggered_alert['_source']['data']['vulnerability']['cve'] - - if alert_cve == cve and alert_package_name == package and \ - alert_package_version == version: - found = True - - if not found: - print(f"Vulnerability not found: {vulnerability}") - expected_alerts_not_found.append(vulnerability) + for cve in package['CVE']: + logging.critical(f"Checking vulnerability: {cve}") + + package_name = package['package_name'] + package_version = package['package_version'] + + found = False + + for triggered_alert in triggered_alerts: + alert_package_name = triggered_alert['_source']['data']['vulnerability']['package']["name"] + alert_package_version = \ + triggered_alert['_source']['data']['vulnerability']['package']['version'] + alert_cve = triggered_alert['_source']['data']['vulnerability']['cve'] + + if alert_cve == cve and alert_package_name == package_name and \ + alert_package_version == package_version: + found = True + + if not found: + print(f"Vulnerability not found: {cve} for package {package} {package_version}") + expected_alerts_not_found.append({'CVE': cve, 'PACKAGE_NAME': package_name, + 'PACKAGE_VERSION': package_version}) logging.critical(f"Expected alerts not found: {expected_alerts_not_found}") logging.critical(f"Triggered alerts: {triggered_alerts}") diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json new file mode 100644 index 0000000000..faefd51aa2 --- /dev/null +++ 
b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json @@ -0,0 +1,108 @@ +{ + "nmap-6.46": { + "package_name": "nmap", + "package_version": "6.46-1", + "CVE": ["CVE-2018-15173"], + "urls": { + "centos": { + "amd64": "https://nmap.org/dist/nmap-6.46-1.x86_64.rpm" + } + }, + "uninstall_name": "nmap*" + }, + "grafana-8.5.5": { + "package_name": "grafana", + "package_version": "8.5.5", + "CVE": ["CVE-2023-2183"], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm" + } + }, + "uninstall_name": "grafana*" + }, + + + https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb + https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb + https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb + https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb + https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_arm64.deb + https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_arm64.deb + https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_arm64.deb + + + + + + + + + + + + + + + + + + + "vlc-3.0.6": { + "package_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "package_version": "3.0.6", + "CVE": ["CVE-2019-12874"], + "urls": { + "windows": { + "amd64": "https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe" + } + }, + "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" + }, + "node-17.0.1": { + "package_name": "node", + "package_version": "17.0.1", + "CVE": ["CVE-2022-21824"], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg", + "arm64v8": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg" + } + }, + "uninstall_name": "node*" + }, + "lynx-2.8.8":{ + "package_name": "lynx", + 
"package_version": "2.8.8-0.3.dev15.el7", + "CVE": ["CVE-2021-38165"], + "urls": { + "centos": { + "amd64": "https://download.cf.centos.org/centos/7/os/x86_64/Packages/lynx-2.8.8-0.3.dev15.el7.x86_64.rpm" + } + }, + "uninstall_name": "lynx*" + }, + "firefox-78.9.0":{ + "package_name": "firefox", + "package_version": "78.9.0-1.el7.centos", + "CVE": ["CVE-2023-6873", "CVE-2023-6872", "CVE-2022-38478"], + "urls": { + "centos": { + "amd64": "https://download.cf.centos.org/centos/7/updates/x86_64/Packages/firefox-78.9.0-1.el7.centos.x86_64.rpm" + } + }, + "uninstall_name": "firefox*" + }, + "firefox-91.13.0":{ + "package_name": "firefox", + "package_version": "91.13.0-1.el7.centos", + "CVE": ["CVE-2023-6873", "CVE-2023-6872"], + "urls": { + "centos": { + "amd64": "https://download.cf.centos.org/centos/7/updates/x86_64/Packages/firefox-91.13.0-1.el7.centos.x86_64.rpm" + } + }, + "uninstall_name": "firefox*" + } +} diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index a1719b0abd..02f4ea38fa 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -553,14 +553,21 @@ def remove_package(self, host, package_name, system): Example: host_manager.remove_package('my_host', 'my_package', system='ubuntu') """ + logging.critical(f"Removing package {package_name} from {host}") + logging.critical(f"System: {system}") + logging.critical(f"Host variables: {self.get_host_variables(host)}") + result = False + os_name = self.get_host_variables(host)['os_name'] if os_name == 'windows': result = self.get_host(host).ansible("win_command", f"& '{package_name}' /S", check=False) elif os_name == 'linux': os = self.get_host_variables(host)['os'].split('_')[0] if os == 'centos': + logging.critical(f"Centos!") result = self.get_host(host).ansible("yum", f"name={package_name} state=absent", check=False) + logging.critical(f"Result: {result}") elif os == 'ubuntu': result = 
self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 20d7cbbab6..3d3aabcd65 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -1,66 +1,103 @@ -- case: "Installation of a vulnerable package" - id: "install_package" - description: "Installation of a vulnerable package" - preconditions: null - body: - tasks: +#- case: "Installation of a vulnerable package" +# id: "install_package" +# description: "Installation of a vulnerable package" +# preconditions: null +# body: +# tasks: +# - operation: install_package +# target: agent +# check: +# alerts: True +# state_index: True +# package: +# centos: +# # amd64: nmap-6.46 +# amd64: lynx-2.8.8 +# arm64v8: grafana-enterprise-8.5.5 +# ubuntu: +# amd64: grafana-enterprise-8.5.5 +# windows: +# amd64: vlc-3.0.6 +# macos: +# amd64: node-v17.0.1 + +- case: "Upgrade" + id: "upgrade_package" + description: "Upgrade of a vulnerable package" + preconditions: + tasks: - operation: install_package target: agent + check: + alerts: True + state_index: True package: centos: - # amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm - amd64: https://nmap.org/dist/nmap-6.46-1.x86_64.rpm - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm + amd64: firefox-78.9.0 + arm64v8: grafana-enterprise-8.5.5 ubuntu: - amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb + amd64: grafana-enterprise-8.5.5 windows: - amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe + amd64: vlc-3.0.6 macos: - amd64: 
https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - arm64v8: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - - - operation: check_agent_vulnerability + amd64: node-v17.0.1 + arm64v8: node-v17.0.1 + body: + tasks: + - operation: update_package target: agent - parameters: - alert_indexed: True - state_indice: True - vulnerability_data: - centos: - amd64: - # - PACKAGE_NAME: "grafana" - # PACKAGE_VERSION: "8.5.5" - # CVE: CVE-2023-2183 - - PACKAGE_NAME: "nmap" - PACKAGE_VERSION: "6.46-1" - CVE: CVE-2018-15173 - arm64v8: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - ubuntu: - amd64: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - arm64v8: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - windows: - amd64: - - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" - PACKAGE_VERSION: "3.0.6" - CVE: CVE-2019-12874 - macos: - amd64: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: CVE-2022-21824 - arm64v8: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: CVE-2022-21824 + check: + alerts: True + state_index: True + package: + from: + centos: + amd64: firefox-78.9.0 + arm64v8: grafana-enterprise-8.5.5 + ubuntu: + amd64: grafana-enterprise-8.5.5 + windows: + amd64: vlc-3.0.6 + macos: + amd64: node-v17.0.1 + arm64v8: node-v17.0.1 + to: + centos: + # amd64: nmap-6.46 + amd64: firefox-91.13.0 + arm64v8: grafana-enterprise-8.5.5 + ubuntu: + amd64: grafana-enterprise-8.5.5 + windows: + amd64: vlc-3.0.6 + macos: + amd64: node-v17.0.1 + arm64v8: node-v17.0.1 + + +# - case: "Removal of a vulnerable package" +# id: "remove_package" +# description: "Remove of a vulnerable package" +# preconditions: null +# body: +# tasks: +# - operation: remove_package +# target: agent +# check: +# alerts: True +# state_index: True +# package: +# centos: +# # amd64: nmap-6.46 +# amd64: lynx-2.8.8 +# arm64v8: grafana-enterprise-8.5.5 +# ubuntu: +# amd64: grafana-enterprise-8.5.5 +# 
windows: +# amd64: vlc-3.0.6 +# macos: +# amd64: node-v17.0.1 +# arm64v8: node-v17.0.1 # - case: "Upgrade of a vulnerable package: Remain vulnerable" diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index 4893a92fef..07e48f993a 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -50,6 +50,8 @@ def collect_evidences(test_name, host_manager, evidences) -> None: current_dir = os.path.dirname(__file__) vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") tests_evidences_directory = os.path.join(str(vulnerability_detector_logs_dir), str(test_name)) + logging.critical(f"Collecting evidences for {test_name}") + logging.critical(evidences) if evidences: logging.info(f"Collecting custom evidences for {test_name}") @@ -105,14 +107,30 @@ def setup(preconditions, teardown, host_manager): """ """ if preconditions: - print("Configuyring preconditions") - launch_parallel_operations(preconditions['tasks'], host_manager) + result = launch_parallel_operations(preconditions['tasks'], host_manager) + + + for host in result.keys(): + if result[host]['checks']['all_successfull'] is False: + success_for_all_agents = False + logging.critical(f"Test failed for host {host}. Check logs for more information") + logging.critical(f"Evidences: {result[host]['evidences']}") - yield + + yield result if teardown: - print("Configuring teardonw") - launch_parallel_operations(teardown, host_manager) + result = launch_parallel_operations(teardown, host_manager) + + for host in result.keys(): + if result[host]['checks']['all_successfull'] is False: + success_for_all_agents = False + logging.critical(f"Test failed for host {host}. 
Check logs for more information") + logging.critical(f"Evidences: {result[host]['evidences']}") + + + + @pytest.fixture(scope='session', autouse=True) @@ -122,7 +140,7 @@ def handle_logs(): os.makedirs(logs_dir, exist_ok=True) yield - + shutil.rmtree(logs_dir, ignore_errors=True) @@ -235,7 +253,8 @@ def pytest_runtest_makereport(item, call): if 'host_manager' in item.funcargs: evidences = None if 'get_results' in item.funcargs: - evidences = item.funcargs['get_results'] + test_result = item.funcargs['get_results'] + evidences = test_result.get_evidences() collect_evidences(item._request.node.name, item.funcargs['host_manager'], evidences) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 1f66bd71b8..6cb8127a66 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -136,6 +136,9 @@ def __init__(self): } } + def get_evidences(self): + return self.evidences + def summary(self): """Print a summary of the results of the tests""" if any(self.checks.values()): @@ -598,7 +601,7 @@ def tests_syscollector_first_second_scan_consistency_alerts(self, host_manager, if not results.checks['vulnerabilities_equal_between_scans']: pytest.fail("Test failed. 
Check logs for more information") - def tests_syscollector_first_second_scan_consistency_index(self, host_manager, setup_vulnerability_tests, + def tests_syscollector_first_second_scan_consistency_index(self, host_manager, setup_vulnerability_tests, get_results): results = get_results test_name = 'tests_syscollector_first_second_scan_consistency_index' @@ -644,13 +647,38 @@ def tests_syscollector_first_second_scan_consistency_index(self, host_manager, s depends=case['depends']) for case in cases] list_ids = [case['id'] for case in cases] +class TestScanSyscollectorCases(): + results = {} + + @pytest.fixture(scope='class') + def get_results(self): + return self.results + + + @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) + #def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager): + def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager, get_results): + setup_results = setup + results = get_results + + if len(results_) + + + + logger.critical("Starting scan cases tests") + logger.critical(f"Case Info: {body}") + + setup_results = setup + + # Launch tests tasks + result = launch_parallel_operations(body['tasks'], host_manager, setup_results.keys()) + + success_for_all_agents = True -@pytest.mark.dependency() -@pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) -#def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager): -def test_vulnerability_detector_scans_cases(preconditions, body, teardown, setup, host_manager): - logger.critical("Starting scan cases tests") - logger.critical(f"Case Info: {body}") + for host in result.keys(): + if result[host]['checks']['all_successfu1ll'] is False: + success_for_all_agents = False + logger.critical(f"Test failed for host {host}. 
Check logs for more information") + logger.critical(f"Evidences: {result[host]['evidences']}") - # Launch tests tasks - launch_parallel_operations(body['tasks'], host_manager) + assert success_for_all_agents is True, "Test failed. Check logs for more information" From a74db5efe010e6488b48e2dad29ef831961d5946 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 23 Jan 2024 09:26:20 +0000 Subject: [PATCH 102/174] style: fix minor style errors in soem E2E libraries --- .../wazuh_testing/end_to_end/__init__.py | 5 +---- .../wazuh_testing/end_to_end/configuration.py | 5 +++-- .../wazuh_testing/end_to_end/indexer_api.py | 11 +++++------ .../wazuh_testing/wazuh_testing/end_to_end/logs.py | 13 +++++++++++-- .../end_to_end/remote_operations_handler.py | 2 +- .../test_vulnerability_detector/conftest.py | 1 - .../test_vulnerability_detector.py | 14 +++++++++----- 7 files changed, 30 insertions(+), 21 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py b/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py index 8a47616f32..c107f5bedd 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/__init__.py @@ -11,19 +11,16 @@ fetched_alerts_json_path = os.path.join(gettempdir(), 'alerts.json') - base_path = { 'linux': '/var/ossec', - 'windows': 'C:\Program Files (x86)\ossec-agent', + 'windows': r'C:\Program Files (x86)\ossec-agent', 'macos': '/Library/Ossec' } - configuration_filepath_os = { 'linux': os.path.join(base_path['linux'], 'etc', 'ossec.conf'), 'windows': os.path.join(base_path['windows'], 'ossec.conf'), 'macos': os.path.join(base_path['macos'], 'etc', 'ossec.conf') } - logs_filepath_os = { 'linux': os.path.join(base_path['linux'], 'logs', 'ossec.log'), 'windows': os.path.join(base_path['windows'], 'ossec.log'), diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py 
b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index d87f1ba6a2..c34f7776f5 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -2,12 +2,13 @@ Configurations handler for remote hosts. ---------------------------------------- -This module provides functions for configuring and managing host +This module provides functions for configuring and managing remote host configurations using the HostManager class and related tools. Functions: - backup_configurations: Backup configurations for all hosts in the specified host manager. - - restore_backup: Restore configurations for all hosts in the specified host manager. + - restore_configuration: Restore configurations for all hosts in the specified host manager. + - configure_host: Configure a specific host. - configure_environment: Configure the environment for all hosts in the specified host manager. diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 877ba8819c..7c6ff1667a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -1,5 +1,5 @@ """ -Wazuh Elasticsearch Indexer Module. +Wazuh API Indexer Module. ----------------------------------- This module provides functions to interact with the Wazuh Indexer API. @@ -13,6 +13,7 @@ This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ import requests +import logging from typing import Dict from wazuh_testing.tools.system import HostManager @@ -31,12 +32,12 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' credentials (Optional): A dictionary containing the Indexer credentials. Defaults to {'user': 'admin', 'password': 'changeme'}. index (Optional): The Indexer index name. Defaults to 'wazuh-alerts*'. 
+ greater_than_timestamp (Optional): The timestamp to filter the results. Defaults to None. Returns: - str: The response text from the indexer API. + Dict: A dictionary containing the values retrieved from the Indexer API. """ - print('Getting values from the Indexer API') - + logging.debug(f"Getting values from the Indexer API for index {index}.") url = f"https://{host_manager.get_master_ip()}:9200/{index}/_search" headers = { 'Content-Type': 'application/json', @@ -74,8 +75,6 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' 'size': 10000, } - print(data) - response = requests.get(url=url, params=param, verify=False, auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password']), headers=headers, json=data) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index f566a63b8a..f0447627cb 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -19,7 +19,7 @@ def truncate_remote_host_group_files(host_manager: HostManager, host_group: str, - file_to_truncate: str='logs'): + file_to_truncate: str = 'logs') -> None: """ Truncate log or alert files on remote hosts in a specified host group. @@ -30,17 +30,26 @@ def truncate_remote_host_group_files(host_manager: HostManager, host_group: str, Possible values are 'logs' for log files or 'alerts' for alert files. """ for host in host_manager.get_group_hosts(host_group): - log_file_path = None if file_to_truncate == 'logs': host_os_name = host_manager.get_host_variables(host)['os_name'] log_file_path = logs_filepath_os[host_os_name] elif file_to_truncate == 'alerts': log_file_path = ALERTS_JSON_PATH + else: + log_file_path = file_to_truncate host_manager.truncate_file(host, log_file_path) def get_hosts_logs(host_manager: HostManager, host_group: str = 'all') -> dict: + """ + Get the logs from the specified host group. 
+ + Parameters: + - host_manager (HostManager): An instance of the HostManager class for managing remote hosts. + - host_group (str, optional): The name of the host group where the files will be truncated. + Default is 'all'. + """ host_logs = {} for host in host_manager.get_group_hosts(host_group): host_os_name = host_manager.get_host_variables(host)['os_name'] diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 31e474e4df..cc5275637a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -390,7 +390,7 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man raise ValueError(f"Operation {operation} not recognized") -def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager, target_to_ignore: []): +def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager, target_to_ignore: List[str] = []): """ Launch parallel remote operations on multiple hosts. 
diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index 07e48f993a..a0b8419aa5 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -109,7 +109,6 @@ def setup(preconditions, teardown, host_manager): if preconditions: result = launch_parallel_operations(preconditions['tasks'], host_manager) - for host in result.keys(): if result[host]['checks']['all_successfull'] is False: success_for_all_agents = False diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 6cb8127a66..ead2850fcd 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -660,18 +660,22 @@ def get_results(self): def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager, get_results): setup_results = setup results = get_results + hosts_to_ignore = [] - if len(results_) - + for host in setup_results.keys(): + if setup_results[host]['checks']['all_successfull'] is False: + hosts_to_ignore.append(host) + if len(hosts_to_ignore) > 0: + logger.critical(f"Setup test failed for hosts {hosts_to_ignore}. 
Check logs for more information") + logger.critical(f"Evidences: {setup_results}") + logger.critical("Ignoring these hosts for the rest of the test") logger.critical("Starting scan cases tests") logger.critical(f"Case Info: {body}") - setup_results = setup - # Launch tests tasks - result = launch_parallel_operations(body['tasks'], host_manager, setup_results.keys()) + result = launch_parallel_operations(body['tasks'], host_manager, hosts_to_ignore) success_for_all_agents = True From e696a548a8a0d61feac0bba00533bb8882213259 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 23 Jan 2024 10:13:52 +0000 Subject: [PATCH 103/174] fix: macOS provisioning --- provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml index 5b653196a2..fd9b5bd841 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml @@ -14,7 +14,7 @@ owner: root group: wazuh mode: 0644 - notify: macOS | Restart Wazuh Agent + notify: MacOS | restart wazuh-agent tags: - init - config @@ -33,7 +33,7 @@ owner: root group: wazuh mode: 0640 - notify: macOS | Restart Wazuh Agent + notify: MacOS | restart wazuh-agent tags: - init - config @@ -51,4 +51,3 @@ - authd_pass | length > 0 tags: - config - From 531e1fc89b33009171d908cd7cf70f5c1b5fd20c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 23 Jan 2024 10:14:08 +0000 Subject: [PATCH 104/174] style: improve typing in monitoring and configuration --- .../wazuh_testing/end_to_end/configuration.py | 9 +++++---- .../wazuh_testing/wazuh_testing/end_to_end/monitoring.py | 3 ++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py 
b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index c34f7776f5..88829f3101 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -17,6 +17,7 @@ This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ from multiprocessing.pool import ThreadPool +from typing import Dict import xml.dom.minidom from wazuh_testing.end_to_end import configuration_filepath_os @@ -24,7 +25,7 @@ from wazuh_testing.tools.system import HostManager -def backup_configurations(host_manager: HostManager) -> dict: +def backup_configurations(host_manager: HostManager) -> Dict[str, str]: """ Backup configurations for all hosts in the specified host manager. @@ -45,7 +46,7 @@ def backup_configurations(host_manager: HostManager) -> dict: return backup_configurations -def restore_configuration(host_manager: HostManager, configuration: dict) -> None: +def restore_configuration(host_manager: HostManager, configuration: Dict[str, str]) -> None: """ Restore configurations for all hosts in the specified host manager. @@ -61,7 +62,7 @@ def restore_configuration(host_manager: HostManager, configuration: dict) -> Non host_manager.modify_file_content(host, configuration_filepath, configuration[host]) -def configure_host(host: str, host_configuration: dict, host_manager: HostManager) -> None: +def configure_host(host: str, host_configuration: Dict[str, Dict], host_manager: HostManager) -> None: """ Configure a specific host. @@ -100,7 +101,7 @@ def configure_host(host: str, host_configuration: dict, host_manager: HostManage host_manager.modify_file_content(str(host), config_file_path, final_configuration) -def configure_environment(host_manager: HostManager, configurations: dict) -> None: +def configure_environment(host_manager: HostManager, configurations: Dict[str, str]) -> None: """ Configure the environment for all hosts in the specified host manager. 
diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 84a8e5e0e5..325dfcf61b 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -20,7 +20,6 @@ import re from time import sleep from typing import Dict, List -from multiprocessing.pool import ThreadPool from concurrent.futures import ThreadPoolExecutor, as_completed from wazuh_testing.end_to_end import logs_filepath_os @@ -35,6 +34,7 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict Args: host_manager: An instance of the HostManager class containing information about hosts. monitoring_data: A dictionary containing monitoring data for each host. + ignore_error: If True, ignore errors and continue monitoring. """ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 20, ignore_error=False): @@ -45,6 +45,7 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: host_manager (HostManager): Host Manager to handle the environment host (str): The target host. monitoring_elements(List): A list of dictionaries containing regex, timeout, and file. + ignore_error: If True, ignore errors and continue monitoring. Raises: TimeoutError: If no match is found within the specified timeout. 
From 80a0574ede18a2c67def474faefed17b5799e8d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 23 Jan 2024 10:55:34 +0000 Subject: [PATCH 105/174] fix: macos provisioning restart task --- provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml index 9b96391f3a..4285e2bc3c 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml @@ -6,4 +6,4 @@ win_service: name=WazuhSvc start_mode=auto state=restarted - name: MacOS | restart wazuh-agent - ansible.builtin.shell: "{{ wazuh_macos_dir }}/bin/wazuh-control restart" + ansible.builtin.shell: "{{ macos_wazuh_dir }}/bin/wazuh-control restart" From 2c7b08839d84d9ba0fa92051424c720b3d22bbe9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:51:13 +0000 Subject: [PATCH 106/174] style: fix pep8 indexer_api --- deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 7c6ff1667a..d06f44844a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -76,7 +76,8 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' } response = requests.get(url=url, params=param, verify=False, - auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password']), headers=headers, + auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password']), + headers=headers, json=data) return response.json() From 1c55cfb36d2b70f69a3bfecaac931136d63c22d9 Mon 
Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:51:51 +0000 Subject: [PATCH 107/174] refac: monitoring module --- .../wazuh_testing/end_to_end/monitoring.py | 105 +++++++++--------- 1 file changed, 51 insertions(+), 54 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 325dfcf61b..f1440aa296 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -7,9 +7,8 @@ Functions: - monitoring_events_multihost: Monitor events on multiple hosts concurrently. - - generate_monitoring_logs_all_agent: Generate monitoring data for logs on all agent hosts. + - generate_monitoring_logs: Generate monitoring data for logs on all agent hosts. - generate_monitoring_logs_manager: Generate monitoring data for logs on a specific manager host. - - generate_monitoring_alerts_all_agent: Generate monitoring data for alerts on all agent hosts. Copyright (C) 2015, Wazuh Inc. @@ -18,16 +17,17 @@ """ import re +import logging from time import sleep +from datetime import datetime from typing import Dict, List from concurrent.futures import ThreadPoolExecutor, as_completed from wazuh_testing.end_to_end import logs_filepath_os -from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.tools.system import HostManager -def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict, ignore_error=False) -> Dict: +def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict, ignore_error: bool = False) -> Dict: """ Monitor events on multiple hosts concurrently. @@ -37,7 +37,7 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict ignore_error: If True, ignore errors and continue monitoring. 
""" def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 20, - ignore_error=False): + ignore_error: bool = False) -> Dict: """ Monitor the specified elements on a host. @@ -50,12 +50,35 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: Raises: TimeoutError: If no match is found within the specified timeout. """ + def filter_events_by_timestamp(match_events: List) -> List: + """ + Filter events by timestamp. + + Args: + match_events (List): A list of events. + + Returns: + List: A list of events that fit the timestamp. + """ + match_that_fit_timestamp = [] + for match in match_events: + if len(match.groups()) > 1: + timestamp_str = match.groups()[0] + timestamp_format = "%Y/%m/%d %H:%M:%S" + timestamp_datetime = datetime.strptime(timestamp_str, timestamp_format) + if timestamp_datetime >= greater_than_timestamp: + match_that_fit_timestamp.append(match) + + return match_that_fit_timestamp + elements_not_found = [] elements_found = [] for element in monitoring_elements: - regex, timeout, monitoring_file, n_iterations = element['regex'], element['timeout'], element['file'], \ - element['n_iterations'] + regex, timeout, monitoring_file, n_iterations, greater_than_timestamp = element['regex'], \ + element['timeout'], element['file'], \ + element['n_iterations'], \ + element.get('greater_than_timestamp', None) current_timeout = 0 regex_match = None @@ -63,8 +86,14 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: file_content = host_manager.get_file_content(host, monitoring_file) match_regex = re.findall(regex, file_content) - if match_regex and len(list(match_regex)) >= n_iterations: - elements_found = list(match_regex) + + if greater_than_timestamp: + match_that_fit_timestamp = filter_events_by_timestamp(match_regex) + else: + match_that_fit_timestamp = list(match_regex) + + if match_that_fit_timestamp and len(list(match_that_fit_timestamp)) >= 
n_iterations: + elements_found = list(match_that_fit_timestamp) regex_match = True break @@ -82,9 +111,7 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: if host not in monitoring_result: monitoring_result[host] = {} - monitoring_result[host]['not_found'] = elements_not_found - - monitoring_result[host]['found'] = elements_found + monitoring_result = {host: {'not_found': elements_not_found, 'found': elements_found}} return monitoring_result @@ -99,13 +126,13 @@ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: result = future.result() results.update(result) except Exception as e: - print(f"An error occurred: {e}") + logging.error(f"An error occurred: {e}") return results -def generate_monitoring_logs(host_manager: HostManager, regex_list: list, timeout_list: list, hosts: list, - n_iterations=1) -> dict: +def generate_monitoring_logs(host_manager: HostManager, regex_list: List[str], timeout_list: List[str], + hosts: List[str], n_iterations=1, greater_than_timestamp: str = '') -> Dict: """ Generate monitoring data for logs on all agent hosts. @@ -113,6 +140,9 @@ def generate_monitoring_logs(host_manager: HostManager, regex_list: list, timeou host_manager: An instance of the HostManager class containing information about hosts. regex_list: A list of regular expressions for monitoring. timeout_list: A list of timeout values for monitoring. + hosts: A list of target hosts. + n_iterations: The number of iterations to find the regex. Defaults to 1. + greater_than_timestamp: The timestamp to filter the results. Defaults to None. Returns: dict: Monitoring data for logs on all agent hosts. 
@@ -126,13 +156,14 @@ def generate_monitoring_logs(host_manager: HostManager, regex_list: list, timeou 'regex': regex_index, 'file': logs_filepath_os[os_name], 'timeout': timeout_list[index], - 'n_iterations': n_iterations + 'n_iterations': n_iterations, + 'greater_than_timestamp': greater_than_timestamp }) return monitoring_data def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int, - n_iterations: int = 1) -> dict: + n_iterations: int = 1, greater_than_timestamp: str = '') -> Dict: """ Generate monitoring data for logs on a specific manager host. @@ -141,6 +172,7 @@ def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, re manager: The target manager host. regex: The regular expression for monitoring. timeout: The timeout value for monitoring. + greater_than_timestamp: The timestamp to filter the results. Defaults to None. Returns: dict: Monitoring data for logs on the specified manager host. @@ -151,43 +183,8 @@ def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, re 'regex': regex, 'file': logs_filepath_os[os_name], 'timeout': timeout, - 'n_iterations': n_iterations + 'n_iterations': n_iterations, + 'greater_than_timestamp': greater_than_timestamp }] return monitoring_data - - -def generate_monitoring_alerts_all_agent(host_manager: HostManager, events_metadata: dict) -> dict: - """ - Generate monitoring data for alerts on all agent hosts. - - Args: - host_manager: An instance of the HostManager class containing information about hosts. - events_metadata: Metadata containing information about events. - - Returns: - dict: Monitoring data for alerts on all agent hosts. 
- """ - monitoring_data = {} - - for agent in host_manager.get_group_hosts('agent'): - host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - metadata_agent = events_metadata[host_os_name] - - if not host_manager.get_host_variables(agent)['manager'] in monitoring_data: - monitoring_data[host_manager.get_host_variables(agent)['manager']] = [] - - for event in metadata_agent[host_manager.get_host_variables(agent)['arch']]: - event['parameters']['HOST_NAME'] = agent - monitoring_element = { - 'regex': get_event_regex(event), - 'file': '/var/ossec/logs/alerts/alerts.json', - 'timeout': 120, - 'n_iterations': 1 - } - if 'parameters' in metadata_agent: - monitoring_element['parameters'] = metadata_agent['parameters'] - - monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element) - - return monitoring_data From 748eb8185c5bf8b2e4ec32bbbb8c9d78360922b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:52:26 +0000 Subject: [PATCH 108/174] fix: errors in remote operations functions --- .../end_to_end/remote_operations_handler.py | 58 +++++++++++++------ 1 file changed, 39 insertions(+), 19 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index cc5275637a..36bafefdcc 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -23,6 +23,7 @@ import os import json import logging +import time from typing import Dict, List from multiprocessing.pool import ThreadPool from datetime import datetime @@ -85,10 +86,20 @@ def install_package(host: str, operation_data: Dict[str, Dict], host_manager: Ho current_datetime = datetime.utcnow().isoformat() host_manager.install_package(host, package_url, system) + logging.critical(f"Package {package_url} 
installed on {host}") + time.sleep(200) logging.critical(f"Package installed on {host}") + results = { + 'evidences': { + "alerts_not_found": [], + "states_not_found": [] + }, + 'checks': {} + } - if operation_data['check']['alerts'] or operation_data['check']['state_index']: + + if 'check' in operation_data and (operation_data['check']['alerts'] or operation_data['check']['state_index']): logging.critical(f"Waiting for syscollector scan to finish on {host}") TIMEOUT_SYSCOLLECTOR_SCAN = 80 truncate_remote_host_group_files(host_manager, 'agent', 'logs') @@ -112,24 +123,16 @@ def install_package(host: str, operation_data: Dict[str, Dict], host_manager: Ho logging.critical(f"Checking agent vulnerability on {host}") - results = { - 'evidences': { - "alerts_not_found": [], - "states_not_found": [] - }, - 'checks': {} - } - if 'check' in operation_data: if operation_data['check']['alerts']: logging.critical(f'Checking vulnerability alerts in the indexer for {host}') - results["alerts_not_found"] = check_vuln_alert_indexer(host_manager, host, package_data, - current_datetime) + results['evidences']["alerts_not_found"] = check_vuln_alert_indexer(host_manager, host, package_data, + current_datetime) if operation_data['check']['state_index']: logging.critical(f'Checking vulnerability state index for {host}') - results["states_not_found"] = check_vuln_state_index(host_manager, host, package_data, - current_datetime) + results['results']["states_not_found"] = check_vuln_state_index(host_manager, host, package_data, + current_datetime) logging.critical(f"Results: {results}") @@ -137,10 +140,12 @@ def install_package(host: str, operation_data: Dict[str, Dict], host_manager: Ho results['checks']['all_successfull'] = False else: results['checks']['all_successfull'] = True + else: + results['checks']['all_successfull'] = True - return { - f"{host}": results - } + return { + f"{host}": results + } def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: 
HostManager): @@ -259,7 +264,7 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos system = host_manager.get_host_variables(host)['os'].split('_')[0] install_package_data_from = operation_data['package']['from'] - install_package_data_to= operation_data['package']['to'] + install_package_data_to = operation_data['package']['to'] package_id_from = None package_id_to = None @@ -288,6 +293,9 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos current_datetime = datetime.utcnow().isoformat() host_manager.install_package(host, package_url_to, system) + logging.critical(f"Package {package_url_to} installed on {host}") + time.sleep(200) + logging.critical(f"Package installed on {host}") if operation_data['check']['alerts'] or operation_data['check']['state_index']: @@ -385,6 +393,7 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man operation = operation_data['operation'] if operation in globals(): operation_result = globals()[operation](host, operation_data, host_manager) + logging.critical(f"Operation result: {operation_result}") return operation_result else: raise ValueError(f"Operation {operation} not recognized") @@ -405,17 +414,28 @@ def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager, results[target]['checks']['all_successfull'] = False def launch_and_store_result(args): + logging.info("Launching remote operation on host: {}".format(args[0])) host, task, manager = args result = launch_remote_operation(host, task, manager) + logging.info("FINAL Result of remote operation on host {}: {}".format(host, result)) results.update(result) with ThreadPoolExecutor() as executor: # Submit tasks asynchronously - futures = [executor.submit(launch_and_store_result, (host, task, host_manager)) - for task in task_list for host in host_manager.get_group_hosts(task['target'] - target_to_ignore)] + futures = [] + for task in task_list: + hosts_target = 
host_manager.get_group_hosts(task['target']) + if target_to_ignore: + hosts_target = [host for host in hosts_target if host not in target_to_ignore] + + logging.info("Hosts target after removing ignored targets: {}".format(hosts_target)) + + for host in hosts_target: + futures.append(executor.submit(launch_and_store_result, (host, task, host_manager))) # Wait for all tasks to complete for future in futures: future.result() + logging.info("Results in parallel operations: {}".format(results)) return results From 215671426926417c0d9d983b9f901daae650f006 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:53:07 +0000 Subject: [PATCH 109/174] feat: include evidence gathering logic --- .../test_vulnerability_detector/conftest.py | 118 ++++++++++++------ 1 file changed, 83 insertions(+), 35 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index a0b8419aa5..c769bc2c17 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -38,6 +38,26 @@ def test_example(host_manager): results = dict() +def collect_e2e_environment_data(test_name, host_manager) -> None: + """Collect data from the environment for the test + + Args: + host_manager: An instance of the HostManager class containing information about hosts. 
+ """ + logging.info("Collecting environment data") + environment_logs = get_hosts_logs(host_manager) + + current_dir = os.path.dirname(__file__) + vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") + tests_evidences_directory = os.path.join(str(vulnerability_detector_logs_dir), str(test_name)) + + for host in environment_logs.keys(): + host_logs_name_evidence = host + "_ossec.log" + evidence_file = os.path.join(tests_evidences_directory, host_logs_name_evidence) + with open(evidence_file, 'w') as evidence_file: + evidence_file.write(environment_logs[host]) + + def collect_evidences(test_name, host_manager, evidences) -> None: """ Collect evidences for the test @@ -51,33 +71,21 @@ def collect_evidences(test_name, host_manager, evidences) -> None: vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") tests_evidences_directory = os.path.join(str(vulnerability_detector_logs_dir), str(test_name)) logging.critical(f"Collecting evidences for {test_name}") - logging.critical(evidences) if evidences: logging.info(f"Collecting custom evidences for {test_name}") - if test_name in evidences.keys(): - evidences_to_collect = evidences[test_name] - for evidence, content in evidences_to_collect.items(): - if content is not None and content != [] and content != {}: - evidence_file = os.path.join(tests_evidences_directory, evidence + ".log") - with open(evidence_file, 'w') as evidence_file: - if content.__class__ == dict: - try: - json.dump(content, evidence_file, indent=4) - except TypeError: - logging.critical(f"Error dumping {evidence} to file") - pass - else: - evidence_file.write(str(content)) - - logging.info(f"Collecting generic evidences for {test_name}") - environment_logs = get_hosts_logs(host_manager) + for evidence, content in evidences.items(): + if content is not None and content != [] and content != {}: + evidence_file = os.path.join(tests_evidences_directory, evidence + ".log") + with open(evidence_file, 'w') as evidence_file: + if 
content.__class__ == dict: + try: + json.dump(content, evidence_file, indent=4) + except TypeError: + logging.critical(f"Error dumping {evidence} to file") + else: + evidence_file.write(str(content)) - for host in environment_logs.keys(): - host_logs_name_evidence = host + "_ossec.log" - evidence_file = os.path.join(tests_evidences_directory, host_logs_name_evidence) - with open(evidence_file, 'w') as evidence_file: - evidence_file.write(environment_logs[host]) @pytest.fixture(scope='session') @@ -98,38 +106,55 @@ def host_manager(request): """ inventory_path = request.config.getoption('--inventory-path') host_manager = HostManager(inventory_path) + print("Testing") + validate_environment(host_manager) return host_manager +def validate_environment(host_manager: HostManager) -> None: + """Check if the environment is accessible + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + """ + hosts_not_reachable = host_manager.get_hosts_not_reachable() + + assert len(hosts_not_reachable) == 0, f"Hosts not reachable: {hosts_not_reachable}" + + @pytest.fixture(scope='function') def setup(preconditions, teardown, host_manager): """ """ + logging.info("Running setup") + result = {} + if preconditions: + logging.info("Running preconditions") result = launch_parallel_operations(preconditions['tasks'], host_manager) + logging.info(f"Preconditions finished. Results: {result}") for host in result.keys(): if result[host]['checks']['all_successfull'] is False: - success_for_all_agents = False logging.critical(f"Test failed for host {host}. 
Check logs for more information") logging.critical(f"Evidences: {result[host]['evidences']}") + logging.info(f"Rsults of preconditions: {result}") yield result + logging.info("Running teardown") + if teardown: result = launch_parallel_operations(teardown, host_manager) for host in result.keys(): if result[host]['checks']['all_successfull'] is False: - success_for_all_agents = False logging.critical(f"Test failed for host {host}. Check logs for more information") logging.critical(f"Evidences: {result[host]['evidences']}") - - - + logging.info(f"Rsults of teardown: {result}") @pytest.fixture(scope='session', autouse=True) @@ -223,7 +248,7 @@ def pytest_runtest_makereport(item, call): results[report.location[0]] = {'passed': 0, 'failed': 0, 'skipped': 0, 'xfailed': 0, 'error': 0} extra = getattr(report, 'extra', []) - if report.when == 'call': + if report.when == 'teardown': # Apply hack to fix length filename problem pytest_html.HTMLReport.TestResult.create_asset = create_asset @@ -250,14 +275,34 @@ def pytest_runtest_makereport(item, call): logs_path = os.path.join(os.path.dirname(item._request.node.path), 'logs', item._request.node.name) if 'host_manager' in item.funcargs: - evidences = None - if 'get_results' in item.funcargs: - test_result = item.funcargs['get_results'] - evidences = test_result.get_evidences() + collect_e2e_environment_data(item._request.node.name, item.funcargs['host_manager']) + + if 'get_results' in item.funcargs: + test_result = item.funcargs['get_results'] + + if item._request.node.name in test_result and 'evidences' in test_result[item._request.node.name]: + evidences = test_result[item._request.node.name]['evidences'] + collect_evidences(item._request.node.name, item.funcargs['host_manager'], evidences) + else: + logging.info(f"No evidences found for {item._request.node.name}") + - collect_evidences(item._request.node.name, item.funcargs['host_manager'], evidences) + # if 'host_manager' in item.funcargs: + # evidences = None + # if 
'get_results' in item.funcargs: + # test_result = item.funcargs['get_results'] + # if item._request.node.name in test_result and 'evidences' in test_result[item._request.node.name]: + # evidences = test_result[item._request.node.name]['evidences'] + # else: + # logging.critical(f"No evidences found for {item._request.node.name}") + # evidences = None - files = [f for f in os.listdir(logs_path) if + # collect_evidences(item._request.node.name, item.funcargs['host_manager'], evidences) + + files = [] + + if os.path.exists(logs_path): + files = [f for f in os.listdir(logs_path) if os.path.isfile(os.path.join(logs_path, f))] for filepath in files: @@ -318,3 +363,6 @@ def pytest_configure(config): current_dir = os.path.dirname(__file__) config.option.css = [os.path.join(current_dir, '../../../deps/wazuh_testing/wazuh_testing/reporting/style.css')] + + + From bfd14d6de31f29cc43d29ff9a612e24c7e64e85a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:53:37 +0000 Subject: [PATCH 110/174] fix: bug in indexer alert gathering --- .../wazuh_testing/end_to_end/vulnerability_detector.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py index a124aee7ab..61709b207b 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py @@ -84,8 +84,8 @@ def get_indexed_vulnerabilities_by_agent(indexed_vulnerabilities): agent = vulnerabilities_state['_source']['agent']['name'] if agent not in vulnerabilities_by_agent: vulnerabilities_by_agent[agent] = [] - else: - vulnerabilities_by_agent[agent].append(vulnerabilities_state) + + vulnerabilities_by_agent[agent].append(vulnerabilities_state) return vulnerabilities_by_agent From f9f9649284f3242f3c04b7e03ded59e2ec65776c Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:54:19 +0000 Subject: [PATCH 111/174] feat: include all cases of VD E2E --- .../vuln_packages.json | 280 ++++++++++++++++-- 1 file changed, 248 insertions(+), 32 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json index faefd51aa2..fbdabd88a2 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json @@ -13,57 +13,176 @@ "grafana-8.5.5": { "package_name": "grafana", "package_version": "8.5.5", - "CVE": ["CVE-2023-2183"], + "CVE": ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] "urls": { "centos": { "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm", "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-8.5.6": { + "package_name": "grafana", + "package_version": "8.5.6", + "CVE": ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + 
"urls": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.aarch64.rpm" + }, + } + "uninstall_name": "grafana*" + }, + "grafana-9.1.1": { + "package_name": "grafana", + "package_version": "9.1.1", + "CVE": ["CVE-2023-2183", "CVE-2023-1387", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-23552", "CVE-2022-23498"], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-9.2.0": { + "package_name": "grafana", + "package_version": "9.2.0", + "CVE": ["CVE-2021-25804", "CVE-2021-25803", "CVE-2021-25802", "CVE-2021-25801","CVE-2020-26664"], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb" + }, + "uninstall_name": "grafana*" + }, + "grafana-9.4.17": { + "package_name": "grafana", + "package_version": "9.4.17", + "CVE": [], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.aarch64.rpm" + }, 
+ "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-9.5.13": { + "package_name": "grafana-enterprise", + "package_version": "9.5.13", + "CVE": [], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-10.0.0": + { + "package_name": "grafana-enterprise", + "package_version": "10.0.0", + "CVE": ["CVE-2023-4822", "CVE-2023-4399"], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_arm64.deb" } }, "uninstall_name": "grafana*" }, - - - https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb - https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb - https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb - https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb - https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_arm64.deb - https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_arm64.deb - https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_arm64.deb - - - - - - - - - - - - - 
- - - - - "vlc-3.0.6": { - "package_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "package_name": "VLC media player" "package_version": "3.0.6", - "CVE": ["CVE-2019-12874"], + "CVE": ["CVE-2023-47360", "CVE-2023-47359", "CVE-2023-46814", "CVE-2022-41325", "CVE-2020-26664", "CVE-2019-19721", "CVE-2019-13962", "CVE-2019-13602", "CVE-2019-12874", "CVE-2019-5460", "CVE-2019-5459", "CVE-2019-5439"], "urls": { "windows": { "amd64": "https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe" } }, + "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + }, + "vlc-3.0.7": { + "package_name": "VLC media player" + "package_version": "3.0.7", + "CVE": ["CVE-2023-47360", "CVE-2023-47359", "CVE-2023-46814", "CVE-2022-41325", "CVE-2020-26664", "CVE-2019-19721", "CVE-2019-13962", "CVE-2019-13602", "CVE-2019-12874"] + "url": { + "windows": { + "amd64": "https://get.videolan.org/vlc/3.0.7/win64/vlc-3.0.7-win64.exe" + } + }, "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + }, + "vlc-3.0.7.1": { + "package_name": "VLC media player", + "package_version": "3.0.7.1", + "CVE": ["CVE-2019-14970", "CVE-2019-14778", "CVE-2019-14777", "CVE-2019-14776", "CVE-2019-14535", "CVE-2019-14534", "CVE-2019-14533", "CVE-2019-14498", "CVE-2019-14438", "CVE-2019-14437", "CVE-2019-13602"], + "url": { + "windows": { + "amd64": "https://get.videolan.org/vlc/3.0.7.1/win64/vlc-3.0.7.1-win64.exe" + } + }, + "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + }, + "vlc-3.0.11": { + "package_name": "VLC media player", + "package_version": "3.0.11", + "CVE": ["CVE-2021-25804", "CVE-2021-25803", "CVE-2021-25802", "CVE-2021-25801","CVE-2020-26664"], + "url": { + "windows": { + "amd64": "https://get.videolan.org/vlc/3.0.11/win64/vlc-3.0.11-win64.exe" + } + }, + 
"uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" }, - "node-17.0.1": { + "vlc-3.0.20": { + "package_name": "VLC media player", + "package_version": "3.0.20", + "CVE": [], + "url": { + "windows": { + "amd64": "https://get.videolan.org/vlc/3.0.20/win64/vlc-3.0.20-win64.exe" + } + }, + "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + }, + + "node-v17.0.1": { "package_name": "node", "package_version": "17.0.1", - "CVE": ["CVE-2022-21824"], + "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], "urls": { "macos": { "amd64": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg", @@ -72,6 +191,103 @@ }, "uninstall_name": "node*" }, + "node-v17.1.0": { + "package_name": "node", + "package_version": "17.1.0", + "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg", + "arm64v8": "https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg" + } + }, + "uninstall_name": "node*" + }, + "node-v18.0.0": { + "package_name": "node", + "package_version": "18.0.0", + "CVE": ["CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30589", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-35256", "CVE-2022-35255", "CVE-2022-32223", "CVE-2022-32222", "CVE-2022-32215", "CVE-2022-32214", "CVE-2022-32213", "CVE-2022-32212", "CVE-2022-3786", "CVE-2022-3602"], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg", + "arm64v8": "https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg" + } + }, + "uninstall_name": "node*" + }, + "node-v18.11.0": { + "package_name": "node", + 
"package_version": "18.11.0", + "CVE": ["CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-32222"], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v18.11.0/node-v18.11.0.pkg", + "arm64v8": "https://nodejs.org/dist/v18.11.0/node-v18.11.0.pkg" + } + }, + "uninstall_name": "node*" + }, + "node-v18.12.0": { + "package_name": "node", + "package_version": "18.12.0", + "CVE": ["CVE-2023-44487", "CVE-2023-38552", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-23936", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-3786", "CVE-2022-3602"], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v18.12.0/node-v18.12.0.pkg", + "arm64v8": "https://nodejs.org/dist/v18.12.0/node-v18.12.0.pkg" + } + }, + "uninstall_name": "node*" + }, + "node-v19.5.0": { + "package_name": "node", + "package_version": "19.5.0", + "CVE": [], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg", + "arm64v8": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg" + } + "windows": "https://nodejs.org/dist/v19.7.0/win-x86/node.exe" + }, + "uninstall_name": "node*" + }, + "node-v19.6.0": { + "package_name": "node", + "package_version": "19.6.0", + "CVE": [], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg", + "arm64v8": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg" + } + }, + "uninstall_name": "node*" + }, + "node-v20.0.0": { + "package_name": "node", + "package_version": "20.0.0", + "CVE": ["CVE-2022-21824"], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v20.0.0/node-v20.0.0.pkg", + "arm64v8": "https://nodejs.org/dist/v20.0.0/node-v20.0.0.pkg" + } + }, + "uninstall_name": "node*" + }, + "node-v20.1.0": { + "package_name": "node", + "package_version": "20.1.0", + "CVE": 
["CVE-2023-44487", "CVE-2023-39332", "CVE-2023-39331", "CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32558", "CVE-2023-32006", "CVE-2023-32005", "CVE-2023-32004", "CVE-2023-32003", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30586", "CVE-2023-30585", "CVE-2023-30581"], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v20.1.0/node-v20.1.0.pkg", + "arm64v8": "https://nodejs.org/dist/v20.1.0/node-v20.1.0.pkg" + } + }, + "uninstall_name": "node*" + }, "lynx-2.8.8":{ "package_name": "lynx", "package_version": "2.8.8-0.3.dev15.el7", From 5b200a13a45f960907fd9dffac6035bc550b34f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:54:51 +0000 Subject: [PATCH 112/174] feat: include host manager utilities --- .../wazuh_testing/tools/system.py | 83 ++++++++++++++++++- 1 file changed, 79 insertions(+), 4 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 02f4ea38fa..8df96c6f7b 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -4,11 +4,13 @@ import json import tempfile +import os import logging import xml.dom.minidom as minidom -from typing import Union +from typing import Union, List import testinfra import yaml +import ansible_runner from wazuh_testing.tools import WAZUH_CONF, WAZUH_API_CONF, API_LOG_FILE_PATH, WAZUH_LOCAL_INTERNAL_OPTIONS from wazuh_testing.tools.configuration import set_section_wazuh_conf @@ -484,7 +486,7 @@ def install_package(self, host, url, system='ubuntu'): result = False print(host) print(url) - if system =='windows': + if system == 'windows': result = self.get_host(host).ansible("win_package", f"path={url} arguments=/S", check=False) elif system == 'ubuntu': result = self.get_host(host).ansible("apt", f"deb={url}", check=False) @@ -494,8 +496,13 @@ def install_package(self, host, url, system='ubuntu'): result = 
self.get_host(host).ansible("yum", f"name={url} state=present sslverify=false disable_gpg_check=True", check=False) if 'rc' in result and result['rc'] == 0 and result['changed']: result = True + elif system == 'macos': + package_name = url.split('/')[-1] + result = self.get_host(host).ansible("command", f"curl -LO {url}", check=False) + print(result) + cmd = f"installer -pkg {package_name} -target /" + result = self.get_host(host).ansible("command", cmd, check=False) - print(result) return result def get_master_ip(self): @@ -561,7 +568,12 @@ def remove_package(self, host, package_name, system): os_name = self.get_host_variables(host)['os_name'] if os_name == 'windows': - result = self.get_host(host).ansible("win_command", f"& '{package_name}' /S", check=False) + logger.setLevel(logging.DEBUG) + r = self.run_playbook(host, 'remove_package_win', params={'uninstall_script_path': package_name}) + # result = self.get_host(host).ansible("ansible.windows.win_shell", fr'powershell -Command "& \"{package_name}\" /S /c"', check=False) + print(r) + logger.setLevel(logging.CRITICAL) + elif os_name == 'linux': os = self.get_host_variables(host)['os'].split('_')[0] if os == 'centos': @@ -570,10 +582,51 @@ def remove_package(self, host, package_name, system): logging.critical(f"Result: {result}") elif os == 'ubuntu': result = self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) + elif os_name == 'macos': + result = self.get_host(host).ansible("command", f"brew uninstall {package_name}", check=False) print(result) return result + def run_playbook(self, host, playbook_name, params=None): + file_dir = os.path.dirname(os.path.realpath(__file__)) + print(playbook_name) + playbook_path = f"{file_dir}/playbooks/{playbook_name}.yaml" + new_playbook = None + new_playbook_path = None + + with open(playbook_path, 'r') as playbook_file: + playbook = playbook_file.read() + new_playbook = playbook.replace('HOSTS', host) + + temp_dir = tempfile.mkdtemp() + 
new_playbook_path = f"{temp_dir}/playbook.yaml" + + with open(f"{temp_dir}/playbook.yaml", 'w') as playbook_file: + playbook_file.write(new_playbook) + + r = None + + logger.setLevel(logging.DEBUG) + try: + print(host) + r = ansible_runner.run( + inventory=self.inventory_path, + playbook=new_playbook_path, + host_pattern=host, + extravars=params, + ) + print("Ansible playbook executed successfully.") + except Exception as e: + print(f"Error executing Ansible playbook: {e}") + + logger.setLevel(logging.CRITICAL) + + print(r) + return r + + + def handle_wazuh_services(self, host, operation): """ Handles Wazuh services on the specified host. @@ -660,6 +713,28 @@ def remove_agents(self): token=token, ) + def get_hosts_not_reachable(self) -> List[str]: + """ + Checks that all hosts provided in the inventory are accessible. + + Returns: + List[str]: List of hosts that are not reachable. + """ + hosts_not_reachable = [] + for host in self.get_group_hosts('all'): + logging.info(f"Checking host {host}...") + os_name = self.get_host_variables(host)['os_name'] + if os_name == 'windows': + command = 'ansible.windows.win_ping' + else: + command = 'ping' + try: + self.get_host(host).ansible(command, check=False) + except Exception as e: + logging.error(f"Error connecting to host {host}: {e}") + hosts_not_reachable.append(host) + + return hosts_not_reachable def clean_environment(host_manager, target_files): From 140b9c1a96f7ec9638c97a16acee116222a42537 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:55:15 +0000 Subject: [PATCH 113/174] feat: complete tests cases for VD --- .../cases/test_vulnerability.yaml | 1055 +++++++---------- 1 file changed, 455 insertions(+), 600 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 3d3aabcd65..27577f3272 100644 --- 
a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -1,31 +1,24 @@ -#- case: "Installation of a vulnerable package" -# id: "install_package" -# description: "Installation of a vulnerable package" -# preconditions: null -# body: -# tasks: -# - operation: install_package -# target: agent -# check: -# alerts: True -# state_index: True -# package: -# centos: -# # amd64: nmap-6.46 -# amd64: lynx-2.8.8 -# arm64v8: grafana-enterprise-8.5.5 -# ubuntu: -# amd64: grafana-enterprise-8.5.5 -# windows: -# amd64: vlc-3.0.6 -# macos: -# amd64: node-v17.0.1 - -- case: "Upgrade" - id: "upgrade_package" - description: "Upgrade of a vulnerable package" - preconditions: - tasks: +- case: "Installation of a vulnerable package" + id: "install_package" + description: | + Installation of a vulnerable package + macos: + Used Package: Node 17.0.1 - PKG Format + CVES: + amd64: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + arm64v8: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + windows: + Used Package: VLC 3.0.6 - Exe Format + CVE: ["CVE-2023-47360", "CVE-2023-47359", "CVE-2023-46814", "CVE-2022-41325", "CVE-2020-26664", "CVE-2019-19721", "CVE-2019-13962", "CVE-2019-13602", "CVE-2019-12874", "CVE-2019-5460", "CVE-2019-5459", "CVE-2019-5439"], + ubuntu: + Used Package Grafana 8.5.5 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + centos: + Used Package Grafana - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", 
"CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + preconditions: null + body: + tasks: - operation: install_package target: agent check: @@ -33,10 +26,94 @@ state_index: True package: centos: - amd64: firefox-78.9.0 + amd64: grafana-enterprise-8.5.5 + arm64v8: grafana-enterprise-8.5.5 + ubuntu: + amd64: grafana-enterprise-8.5.5 arm64v8: grafana-enterprise-8.5.5 + windows: + amd64: vlc-3.0.6 + macos: + amd64: node-v17.0.1 + arm64v8: node-v17.0.1 + +- case: "Remove vulnerable package" + id: "remove_package" + description: | + Removal of a vulnerable package + macos: + Used Package: Node 17.0.1 - PKG Format + CVES Expected to mitigate: + ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + windows: + Used Package: VLC 3.0.6 - Exe Format + + CVES Expected to mitigate: + ["CVE-2023-47360", "CVE-2023-47359", "CVE-2023-46814", "CVE-2022-41325", "CVE-2020-26664", "CVE-2019-19721", "CVE-2019-13962", "CVE-2019-13602", "CVE-2019-12874", "CVE-2019-5460", "CVE-2019-5459", "CVE-2019-5439"], + ubuntu: + Used Package Grafana 8.5.5 - .deb Format + CVES Expected to mitigate: + ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + centos: + Used Package Grafana - .rpm Format + CVE Expected to mitigate: + ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + + preconditions: null + body: + 
tasks: + - operation: remove_package + target: agent + check: + alerts: True + state_index: True + package: + centos: + amd64: grafana-enterprise-8.5.5 + arm64v8: grafana-enterprise-8.5.5 ubuntu: amd64: grafana-enterprise-8.5.5 + arm64v8: grafana-enterprise-8.5.5 + windows: + amd64: vlc-3.0.6 + macos: + amd64: node-v17.0.1 + arm64v8: node-v17.0.1 + + + +- case: "Upgrade: Maintain Vulnerability" + id: "upgrade_package_maintain_vulnerability" + description: | + Upgrade of a vulnerable package which maintain vulnerability + macos: + Used Package: Node 17.1.0 - PKG Format + CVES: + amd64: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + arm64v8: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + windows: + Used Package: VLC 3.0.7 - Exe Format + "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + ubuntu: + Used Package Grafana 8.5.6 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + centos: + Used Package Grafana 8.5.6 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + preconditions: + tasks: + - operation: install_package + target: agent + check: + alerts: True + state_index: True + package: + centos: + amd64: grafana-enterprise-8.5.5 + arm64v8: grafana-enterprise-8.5.5 + ubuntu: + amd64: grafana-enterprise-8.5.5 + arm64v8: grafana-enterprise-8.5.5 
windows: amd64: vlc-3.0.6 macos: @@ -52,10 +129,11 @@ package: from: centos: - amd64: firefox-78.9.0 + amd64: grafana-enterprise-8.5.5 arm64v8: grafana-enterprise-8.5.5 ubuntu: amd64: grafana-enterprise-8.5.5 + arm64v8: grafana-enterprise-8.5.5 windows: amd64: vlc-3.0.6 macos: @@ -63,7 +141,264 @@ arm64v8: node-v17.0.1 to: centos: - # amd64: nmap-6.46 + amd64: firefox-91.13.0 + arm64v8: grafana-enterprise-8.5.6 + ubuntu: + amd64: grafana-enterprise-8.5.6 + windows: + amd64: vlc-3.0.6 + macos: + amd64: node-v17.1.0 + arm64v8: node-v17.1.0 + teardown: null + + +# grafana-enterprise_9.1.1_amd64.deb grafana-enterprise_9.2.0_amd64.deb grafana-enterprise_9.4.17_amd64.deb Any of the previous ones (except 9.4.17) grafana-enterprise_9.4.17_amd64.deb grafana-enterprise_9.5.13_amd64.deb grafana-enterprise_10.0.0_amd64.deb + +- case: "Upgrade: New vulnerability " + id: "upgrade_package_maintain_add_vulnerability" + description: | + Upgrade of a vulnerable package which include a new vulnerability + macos: + Used Package: Node 18.11.0 - PKG Format + CVES: ["CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-32222"], + windows: + Used Package: VLC 3.0.7.1 - Exe Format + "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + ubuntu: + Used Package Grafana 9.1.1 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + centos: + Used Package Grafana 9.1.1 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", 
"CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + preconditions: null + body: + tasks: + - operation: update_package + target: agent + check: + alerts: True + state_index: True + package: + from: + centos: + amd64: grafana-enterprise-8.5.6 + arm64v8: grafana-enterprise-8.5.6 + ubuntu: + amd64: grafana-enterprise-8.5.6 + arm64v8: grafana-enterprise-8.5.6 + windows: + amd64: vlc-3.0.7 + macos: + amd64: node-v17.1.0 + arm64v8: node-v17.1.0 + to: + centos: + amd64: grafana-enterprise-9.1.1 + arm64v8: grafana-enterprise-9.1.1 + ubuntu: + amd64: grafana-enterprise-9.1.1 + arm64v8: grafana-enterprise-9.1.1 + windows: + amd64: vlc-3.0.7.1 + macos: + amd64: node-v18.11.0 + arm64v8: node-v18.11.0 + teardown: null + +- case: "Upgrade: Maintain and new vulnerability " + id: "upgrade_package_maintain_add_vulnerability" + description: | + Upgrade of a vulnerable package which maintain vulnerabilities and include new ones + macos: + Used Package: Node 18.12.0 - PKG Format + "CVE": ["CVE-2023-44487", "CVE-2023-38552", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-23936", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-3786", "CVE-2022-3602"], + windows: + Used Package: VLC 3.0.11 - Exe Format + "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + ubuntu: + Used Package Grafana 9.2.0 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + centos: + Used Package Grafana 9.2.0 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", 
"CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + + preconditions: null + body: + tasks: + - operation: update_package + target: agent + check: + alerts: True + state_index: True + package: + from: + centos: + amd64: grafana-enterprise-9.1.1 + arm64v8: grafana-enterprise-9.1.1 + ubuntu: + amd64: grafana-enterprise-9.1.1 + arm64v8: grafana-enterprise-9.1.1 + windows: + amd64: vlc-3.0.7.1 + macos: + amd64: node-v18.11.0 + arm64v8: node-v18.11.0 + to: + centos: + amd64: grafana-enterprise-9.2.0 + arm64v8: grafana-enterprise-9.2.0 + ubuntu: + amd64: grafana-enterprise-9.2.0 + arm64v8: grafana-enterprise-9.2.0 + windows: + amd64: vlc-3.0.11 + macos: + amd64: node-v18.12.0 + arm64v8: node-v18.12.0 + + teardown: null + +- case: "Upgrade: Cease vulnerability" + id: "upgrade_package_remove_vulnerability" + description: | + Upgrade of a vulnerable which cease to be vulnerable + macos: + Used Package: Node 19.5.0 - PKG Format + "CVE": [], + windows: + Used Package: VLC 3.0.11 - Exe Format + "CVE": [], + ubuntu: + Used Package Grafana 9.4.17 - .deb Format + CVE: [] + centos: + Used Package Grafana 9.4.17 - .rpm Format + CVE: [] + + preconditions: null + body: + tasks: + - operation: update_package + target: agent + check: + alerts: True + state_index: True + package: + from: + centos: + amd64: grafana-enterprise-9.2.0 + arm64v8: grafana-enterprise-9.2.0 + ubuntu: + amd64: grafana-enterprise-9.2.0 + arm64v8: grafana-enterprise-9.2.0 + windows: + amd64: vlc-3.0.11 + macos: + amd64: node-v18.12.0 + arm64v8: node-v18.12.0 + to: + centos: + amd64: grafana-enterprise-9.4.17 + arm64v8: grafana-enterprise-9.4.17 + ubuntu: + arm64v8: grafana-enterprise-9.4.17 + amd64: grafana-enterprise-9.4.17 + windows: + amd64: vlc-3.0.20 + macos: + amd64: node-v19.5.0 + arm64v8: node-v19.5.0 + + 
teardown: null + +- case: "Upgrade: Non vulnerable to non vulnerable" + id: "upgrade_package_nonvulnerable_to_nonvulnerable" + description: | + Upgrade of a non vulnerable package to non vulnerable + macos: + Used Package: Node 19.5.0 - PKG Format + "CVE": [], + windows: + Used Package: Node 19.5.0 - Exe Format + "CVE": [], + ubuntu: + Used Package Grafana 9.5.13 - .deb Format + CVE: [] + centos: + Used Package Grafana 9.5.13 - .rpm Format + CVE: [] + preconditions: + tasks: + - operation: install_package + target: agent + check: + alerts: True + state_index: True + package: + windows: + amd64: node-v19.5.0 + body: + tasks: + - operation: update_package + target: agent + check: + alerts: True + state_index: True + package: + from: + centos: + amd64: grafana-enterprise-9.4.17 + arm64v8: grafana-enterprise-9.4.17 + ubuntu: + arm64v8: grafana-enterprise-9.4.17 + amd64: grafana-enterprise-9.4.17 + windows: + amd64: vlc-3.0.20 + macos: + amd64: node-v19.5.0 + arm64v8: node-v19.5.0 + to: + centos: + amd64: grafana-enterprise-9.5.13 + arm64v8: grafana-enterprise-9.5.13 + ubuntu: + amd64: grafana-enterprise-9.5.13 + arm64v8: grafana-enterprise-9.5.13 + windows: + amd64: node-v19.6.0 + macos: + amd64: node-v19.6.0 + arm64v8: node-v19.6.0 + teardown: null + +- case: "Upgrade: Non vulnerable to vulnerable package" + id: "upgrade_package_nonvulnerable_to_vulnerable" + description: | + Upgrade to non vulnerable package to vulnerable + macos: + Used Package: Node 20.0.0 - PKG Format + "CVE": ["CVE-2023-44487", "CVE-2023-39332", "CVE-2023-39331", "CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32558", "CVE-2023-32006", "CVE-2023-32005", "CVE-2023-32004", "CVE-2023-32003", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30586", "CVE-2023-30585", "CVE-2023-30581"], + windows: + Used Package: Node 20.0.0 - Exe Format + "CVE": ["CVE-2023-44487", "CVE-2023-39332", "CVE-2023-39331", "CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32558", "CVE-2023-32006", "CVE-2023-32005", 
"CVE-2023-32004", "CVE-2023-32003", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30586", "CVE-2023-30585", "CVE-2023-30581"], + ubuntu: + Used Package Grafana 9.5.13 - .deb Format + CVE: ["CVE-2023-4822", "CVE-2023-4399"], + centos: + Used Package Grafana 9.5.13 - .rpm Format + CVE: ["CVE-2023-4822", "CVE-2023-4399"], + preconditions: null + body: + tasks: + - operation: update_package + target: agent + check: + alerts: True + state_index: True + package: + from: + centos: amd64: firefox-91.13.0 arm64v8: grafana-enterprise-8.5.5 ubuntu: @@ -71,574 +406,94 @@ windows: amd64: vlc-3.0.6 macos: - amd64: node-v17.0.1 - arm64v8: node-v17.0.1 - - -# - case: "Removal of a vulnerable package" -# id: "remove_package" -# description: "Remove of a vulnerable package" -# preconditions: null -# body: -# tasks: -# - operation: remove_package -# target: agent -# check: -# alerts: True -# state_index: True -# package: -# centos: -# # amd64: nmap-6.46 -# amd64: lynx-2.8.8 -# arm64v8: grafana-enterprise-8.5.5 -# ubuntu: -# amd64: grafana-enterprise-8.5.5 -# windows: -# amd64: vlc-3.0.6 -# macos: -# amd64: node-v17.0.1 -# arm64v8: node-v17.0.1 - - -# - case: "Upgrade of a vulnerable package: Remain vulnerable" -# id: "upgrade_package_remain_vulnerable" -# description: "Upgrade of a vulnerable package: Remain vulnerable" -# preconditions: null -# depends: "install_package" -# body: -# tasks: -# - operation: install_package -# target: agent -# package: -# centos: -# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb -# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb -# ubuntu: -# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb -# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb -# windows: -# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.7-win64.exe -# macos: -# amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg 
-# arm64v8: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg - -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: False -# state_indice: False -# vulnerability_data: -# centos: -# amd64: -# - PACKAGE_NAME: "postgresql11" -# PACKAGE_VERSION: "11.16-1PGDG.rhel7" -# CVE: CVE-2022-2625 -# arm64v8: -# - PACKAGE_NAME: "postgresql11" -# PACKAGE_VERSION: "11.15-1PGDG.rhel7" -# CVE: CVE-2022-2625 -# ubuntu: -# amd64: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "8.5.6" -# CVE: CVE-2023-2183 -# arm64v8: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "8.5.6" -# CVE: CVE-2023-2183 -# windows: -# amd64: -# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" -# PACKAGE_VERSION: "3.0.7" -# CVE: CVE-2019-12874 -# macos: -# amd64: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "17.1.0" -# CVE: CVE-2022-21824 -# arm64v8: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "17.1.0" -# CVE: CVE-2022-21824 - -# - case: "Upgrade of a vulnerable package: New vulnerability" -# id: "upgrade_package_new_vulnerability" -# description: "Upgrade of a vulnerable package that become vulnerable to another CVE" -# preconditions: null -# depends: "upgrade_package_remain_vulnerable" -# body: -# tasks: -# - operation: install_package -# target: agent -# package: -# centos: -# amd64: [https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-x86_64/postgresql12-libs-12.13-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-x86_64/postgresql12-12.13-1PGDG.rhel7.x86_64.rpm] -# arm64v8: [https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-aarch64/postgresql12-libs-12.13-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/12/redhat/rhel-7-aarch64/postgresql12-12.13-1PGDG.rhel7.aarch64.rpm] -# ubuntu: -# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb -# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb -# 
windows: -# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.8-win64.exe -# macos: -# amd64: https://nodejs.org/dist/v17.1.0/node-v18.0.0.pkg -# arm64v8: https://nodejs.org/dist/v17.1.0/node-v18.0.0.pkg - -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: False -# state_indice: False -# state: False -# vulnerability_data: -# centos: -# amd64: -# - PACKAGE_NAME: "postgresql12" -# CVE: CVE-2022-41862 -# arm64v8: -# - PACKAGE_NAME: "postgresql12" -# CVE: CVE-2022-41862 -# ubuntu: -# amd64: -# - PACKAGE_NAME: "grafana" -# CVE: CVE-2023-2183 -# arm64v8: -# - PACKAGE_NAME: "grafana" -# CVE: CVE-2023-2183 -# windows: -# amd64: -# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" -# CVE: CVE-2019-12874 -# macos: -# amd64: -# - PACKAGE_NAME: "node" -# CVE: CVE-2022-21824 -# arm64v8: -# - PACKAGE_NAME: "node" -# CVE: CVE-2022-21824 - -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: False -# state_indice: False -# vulnerability_data: -# centos: -# amd64: -# - PACKAGE_NAME: "postgresql12" -# PACKAGE_VERSION: "12.13-1PGDG.rhel7" -# CVE: CVE-2022-2625 -# arm64v8: -# - PACKAGE_NAME: "postgresql12" -# PACKAGE_VERSION: "12.13-1PGDG.rhel7" -# CVE: CVE-2022-2625 -# ubuntu: -# amd64: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "9.1.1" -# CVE: CVE-2023-1387 -# arm64v8: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "9.1.1s" -# CVE: CVE-2023-1387 -# windows: -# amd64: -# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" -# PACKAGE_VERSION: "3.0.8" -# CVE: CVE-2019-18278 -# macos: -# amd64: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "18.0.0" -# CVE: CVE-2023-38552 - -# arm64v8: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "18.0.0" -# CVE: CVE-2023-38552 - -# # -------------- -# # -# # -# - case: "Upgrade of a vulnerable package: Another vuln and maintain original vulnerability" -# id: "upgrade_package_new_vulnerability_and_maintain" -# description: 
"Upgrade of a vulnerable package that become vulnerable to another CVE and maintain the previous one" -# preconditions: -# tasks: -# - operation: install_package -# target: agent -# package: -# windows: -# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.10-win64.exe -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: FalseSee -# state_indice: False -# vulnerability_data: -# amd64: -# - PACKAGE_NAME: "vlc" -# PACKAGE_VERSION: "3.0.10" -# CVE: CVE-2023-47360 - -# depends: "upgrade_package_remain_vulnerable" -# body: -# tasks: -# - operation: install_package -# target: agent -# package: -# centos: -# amd64: [https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64/postgresql13-libs-13.00-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-x86_64/postgresql13-13.00-1PGDG.rhel7.x86_64.rpm] -# arm64v8: [https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-aarch64/postgresql13-libs-13.00-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/13/redhat/rhel-7-aarch64/postgresql13-13.0-1PGDG.rhel7.aarch64.rpm] -# ubuntu: -# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_amd64.deb -# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb -# windows: -# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.11-win64.exe -# macos: -# amd64: https://nodejs.org/dist/v17.1.0/node-v18.3.0.pkg -# arm64v8: https://nodejs.org/dist/v17.1.0/node-v18.3.0.pkg - -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: False -# state_indice: False -# vulnerability_data: -# centos: -# amd64: -# - PACKAGE_NAME: "postgresql13" -# PACKAGE_VERSION: "13.00-1PGDG.rhel7" -# CVE: CVE-2022-2625 -# - PACKAGE_NAME: "postgresql13" -# PACKAGE_VERSION: "13.00-1PGDG.rhel7" -# CVE: CVE-2021-23222 -# arm64v8: -# - PACKAGE_NAME: "postgresql13" -# PACKAGE_VERSION: "13.00-1PGDG.rhel7" -# CVE: 
CVE-2022-2625 -# - PACKAGE_NAME: "postgresql13" -# PACKAGE_VERSION: "13.00-1PGDG.rhel7" -# CVE: CVE-2021-23222 -# ubuntu: -# amd64: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "9.2.0" -# CVE: CVE-2023-1387 -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "9.2.0" -# CVE: CVE-2023-22462 -# arm64v8: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "9.2.0" -# CVE: CVE-2023-1387 -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "9.2.0" -# CVE: CVE-2023-22462 -# windows: -# amd64: -# - PACKAGE_NAME: "vlc" -# PACKAGE_VERSION: "3.0.11" -# CVE: CVE-2023-47360 -# - PACKAGE_NAME: "vlc" -# PACKAGE_VERSION: "3.0.11" -# CVE: CVE-2021-25801 -# macos: -# amd64: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "18.3.0" -# CVE: CVE-2023-38552 -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "18.3.0" -# CVE: CVE-2023-32559 -# arm64v8: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "18.3.0" -# CVE: CVE-2023-38552 -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "18.3.0" -# CVE: CVE-2023-32559 - - - -# # Updating a vulnerable package that ceases to be vulnerable - - - - - - -# # Deleting a vulnerable package - - -# # Delete non-vulnerable package - - -# # Install vulnerable package again and remove it - -# - case: "Removal of vulnerable package" -# id: "remove_vulnerable_package" -# description: "Installation of a vulnerable package" -# preconditions: -# tasks: -# - operation: install_package -# target: agent -# package: -# centos: -# amd64: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-libs-11.15-1PGDG.rhel7.x86_64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/postgresql11-11.5-1PGDG.rhel7.x86_64.rpm] -# arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.15-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.15-2PGDG.rhel7.aarch64.rpm] -# ubuntu: -# amd64: 
https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb -# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb -# windows: -# amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe -# macos: -# amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg -# arm64v8: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: False -# state_indice: False -# vulnerability_data: -# centos: -# amd64: -# - PACKAGE_NAME: "postgresql11" -# PACKAGE_VERSION: "11.15-1PGDG.rhel7" -# CVE: CVE-2022-2625 -# arm64v8: -# - PACKAGE_NAME: "postgresql11" -# PACKAGE_VERSION: "11.15-1PGDG.rhel7" -# CVE: CVE-2022-2625 -# ubuntu: -# amd64: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "8.5.5" -# CVE: CVE-2023-2183 -# arm64v8: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "8.5.5" -# CVE: CVE-2023-2183 -# windows: -# amd64: -# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" -# PACKAGE_VERSION: "3.0.6" -# CVE: CVE-2019-12874 -# macos: -# amd64: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "17.0.1" -# CVE: CVE-2022-21824 -# arm64v8: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "17.0.1" -# CVE: CVE-2022-21824 -# body: -# tasks: -# - operation: remove_package -# target: agent -# package: -# centos: -# amd64: nmap -# arm64v8: postgresql11* -# ubuntu: -# amd64: grafana* -# arm64v8: grafana* -# windows: -# amd64: vlc -# macos: -# amd64: node* - -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: False -# state_indice: False -# state: False -# vulnerability_data: -# centos: -# amd64: -# - PACKAGE_NAME: "postgresql11" -# PACKAGE_VERSION: "11.15-1PGDG.rhel7" -# CVE: CVE-2022-2625 -# arm64v8: -# - PACKAGE_NAME: "postgresql11" -# PACKAGE_VERSION: "11.15-1PGDG.rhel7" -# CVE: CVE-2022-2625 -# ubuntu: -# amd64: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "8.5.5" -# CVE: CVE-2023-2183 -# 
arm64v8: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "8.5.5" -# CVE: CVE-2023-2183 -# windows: -# amd64: -# - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" -# PACKAGE_VERSION: "3.0.6" -# CVE: CVE-2019-12874 -# macos: -# amd64: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "17.0.1" -# CVE: CVE-2022-21824 -# arm64v8: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "17.0.1" -# CVE: CVE-2022-21824 - - -# # ---------------------------------------------------------------------------------- - -# - case: "Updating a vulnerable package that remains vulnerable to the same CVE" -# id: "update_vuln_package_vuln_remain" -# description: "Updating a vulnerable package that remains vulnerable to the same CVE" -# preconditions: null -# body: -# tasks: -# - operation: install_package -# target: agent -# package: -# centos: -# amd64: https://nmap.org/dist/nmap-6.47-1.x86_64.rpm -# arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.18-2PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.18-1PGDG.rhel7.aarch64.rpm] -# ubuntu: -# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb -# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb -# windows: -# amd64: https://get.videolan.org/vlc/3.0.8/win64/vlc-3.0.8-win64.exe -# macos: -# amd64: https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: False -# api: True -# alert: False -# state_indice: False -# vulnerability_data: -# centos: -# amd64: -# - PACKAGE_NAME: "nmap" -# PACKAGE_VERSION: "6.47-1" -# CVE: CVE-2020-28924 -# arm64v8: -# - PACKAGE_NAME: "postgresql11" -# PACKAGE_VERSION: "11.17.2" -# CVE: CVE-2023-39417 -# ubuntu: -# amd64: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "8.5.6" -# CVE: CVE-2023-2183 -# arm64v8: -# - PACKAGE_NAME: "grafana" -# 
PACKAGE_VERSION: "8.5.6" -# CVE: CVE-2023-2183 -# windows: -# amd64: -# - PACKAGE_NAME: "vlc" -# PACKAGE_VERSION: "3.0.8" -# CVE: CVE-2023-47360 -# macos: -# amd64: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "17.1.0" -# CVE: CVE-2022-21824 -# teardown: -# tasks: -# - operation: remove_package -# target: agent -# package: -# windows: -# amd64: vlc - -# # --------------------------------------------------------------------- + amd64: node-v19.6.0 + arm64v8: node-v19.6.0 + to: + centos: + amd64: firefox-91.13.0 + arm64v8: grafana-enterprise-8.5.5 + ubuntu: + amd64: grafana-enterprise-8.5.5 + windows: + amd64: vlc-3.0.6 + macos: + amd64: node-v20.0.0 + arm64v8: node-v20.0.0 + + teardown: null + +- case: "Installation of a non vulnerable package" + id: "install_package" + description: | + Installation of a non vulnerable package + macos: + Used Package: Node 19.5.0 - PKG Format + "CVE": [], + windows: + Used Package: Node 19.5.0 - Exe Format + "CVE": [], + ubuntu: + Used Package Grafana 9.5.13 - .deb Format + CVE: [] + centos: + Used Package Grafana 9.5.13 - .rpm Format + CVE: [] + preconditions: null + body: + tasks: + - operation: install_package + target: agent + check: + alerts: True + state_index: True + package: + centos: + amd64: grafana-enterprise-9.5.13 + arm64v8: grafana-enterprise-9.5.13 + ubuntu: + amd64: grafana-enterprise-9.5.13 + arm64v8: grafana-enterprise-9.5.13 + windows: + amd64: node-v19.6.0 + macos: + amd64: node-v19.6.0 + arm64v8: node-v19.6.0 + +- case: "Remove: Non vulnerable package" + id: "remove_non_vulnerable_packge" + description: | + Removal of a non vulnerable package + macos: + Used Package: Node 19.5.0 - PKG Format + "CVE": [], + windows: + Used Package: Node 19.5.0 - Exe Format + "CVE": [], + ubuntu: + Used Package Grafana 9.5.13 - .deb Format + CVE: [] + centos: + Used Package Grafana 9.5.13 - .rpm Format + CVE: [] + body: + tasks: + - operation: remove_package + target: agent + check: + alerts: True + state_index: True + package: + 
centos: + amd64: grafana-enterprise-9.5.13 + arm64v8: grafana-enterprise-9.5.13 + ubuntu: + amd64: grafana-enterprise-9.5.13 + arm64v8: grafana-enterprise-9.5.13 + windows: + amd64: node-v19.6.0 + macos: + amd64: node-v19.6.0 + arm64v8: node-v19.6.0 -# - case: "Updating a vulnerable package that becomes vulnerable to another CVE" -# id: "updating_vulnerable_package_another_cve" -# description: "Updating a vulnerable package that becomes vulnerable to another CVE" -# preconditions: -# tasks: -# - operation: install_package -# target: agent -# package: -# windows: -# amd64: https://get.videolan.org/vlc/3.0.7/win32/vlc-3.0.7-win32.exe -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: False -# api: True -# alert: False -# state_indice: False -# vulnerability_data: -# windows: -# amd64: -# - PACKAGE_NAME: "vlc" -# PACKAGE_VERSION: "3.0.8" -# CVE: CVE-2023-47360 -# body: -# tasks: -# - operation: install_package -# target: agent -# package: -# centos: -# amd64: https://nmap.org/dist/nmap-7.00-1.x86_64.rpm -# arm64v8: [https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-libs-11.20-1PGDG.rhel7.aarch64.rpm, https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-aarch64/postgresql11-11.20-1PGDG.rhel7.aarch64.rpm] -# ubuntu: -# amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb -# arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb -# windows: -# amd64: https://get.videolan.org/vlc/3.0.7.1/win64/vlc-3.0.7.1-win64.exe -# macos: -# amd64: https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg -# - operation: check_agent_vulnerability -# target: agent -# parameters: -# alert_indexed: False -# api: True -# alert: False -# state_indice: False -# vulnerability_data: -# centos: -# amd64: -# # Wrong package -# - PACKAGE_NAME: "nmap" -# PACKAGE_VERSION: "7.00" -# CVE: CVE-2020-28924 -# - PACKAGE_NAME: "nmap" -# PACKAGE_VERSION: "7.00" -# CVE: 
CVE-2018-1000161 -# arm64v8: -# - PACKAGE_NAME: "postgresql11" -# PACKAGE_VERSION: "11.20" -# CVE: CVE-2023-39417 -# ubuntu: -# amd64: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "9.1.1" -# CVE: CVE-2023-1387 -# arm64v8: -# - PACKAGE_NAME: "grafana" -# PACKAGE_VERSION: "9.1.1" -# CVE: CVE-2023-1387 -# windows: -# amd64: -# - PACKAGE_NAME: "vlc" -# PACKAGE_VERSION: "3.0.7" -# CVE: CVE-2019-13962 -# STATUS: ABSENT -# - PACKAGE_NAME: "vlc" -# PACKAGE_VERSION: "3.0.7.1" -# CVE: CVE-2019-14437 -# # Wrong package -# macos: -# amd64: -# - PACKAGE_NAME: "node" -# PACKAGE_VERSION: "17.1.0" -# CVE: CVE-2022-21824 -# # ----------------------------------------------------------- + teardown: null From c31a96343e9a5134370b668e126c28153f4e0683 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:55:39 +0000 Subject: [PATCH 114/174] refac: vd e2e tests --- .../test_vulnerability_detector.py | 616 +++++++++++------- 1 file changed, 378 insertions(+), 238 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index ead2850fcd..ef655099ff 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -44,11 +44,10 @@ import pytest import logging import yaml -import json import time import ast import datetime -from typing import Generator +from typing import Generator, List from wazuh_testing.end_to_end.configuration import backup_configurations, restore_configuration, configure_environment from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files @@ -74,121 +73,7 @@ 'agent': os.path.join(configurations_dir, 'agent.yaml') } vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") -TIMEOUT_PER_AGENT_VULNERABILITY_SCAN = 200 - - -class SyscollectorScansTestsResults: - 
"""Class to store the results of the Syscollector Scans tests - - Attributes: - checks (dict): Dictionary with the results of the checks - evidences (dict): Dictionary with the evidences of the checks - """ - def __init__(self): - self.checks = { - 'syscollector_first_scan': True, - 'first_scan_all_agents_alerts_detected': True, - 'first_scan_all_agents_alerts_vulnerabilities': True, - 'first_scan_all_agents_index_detected': True, - 'first_scan_all_agents_index_vulnerabilities': True, - 'index_state_consistent': True, - 'syscollector_second_scan': True, - 'vulnerabilities_equal_between_scans': True, - 'vulnerabilities_equal_between_scans_indexer': True - } - - self.vulnerabilities_alerts_first_scan = {} - self.vulnerabilities_alerts_second_scan = {} - self.vulnerabilities_index_first_scan = {} - self.vulnerabilities_index_second_scan = {} - - self.timestamp_first_scan = None - self.timestamp_second_scan = None - - self.evidences = { - 'test_syscollector_first_scan': { - 'agents_not_scanned_first_scan': [], - 'testing_evidences_gathering': ["Testing"] - }, - 'test_syscollector_first_scan_alerts': { - 'agents_not_detected_vulnerabilities': [], - 'agents_not_scanned_first_scan': [], - }, - 'test_syscollector_first_scan_index': { - 'agents_not_detected_index_vulnerabilities': [], - 'agents_not_scanned_index': [], - }, - 'tests_syscollector_vulnerabilities_index_alerts_consistency': { - 'alerts_not_in_states': [], - 'states_not_in_alerts': [], - 'alerts_first_scan': [], - 'states_first_scan': [], - }, - 'test_syscollector_second_scan': { - 'agents_not_scanned_second_scan': [], - }, - 'tests_syscollector_first_second_scan_consistency_alerts': { - "vulnerabilities_not_equal_between_scans_alerts": [], - "agents_different_between_scans": [], - }, - 'tests_syscollector_first_second_scan_consistency_index': { - 'vulnerabilities_not_equal_between_scans_indexer': [], - } - } - - def get_evidences(self): - return self.evidences - - def summary(self): - """Print a summary of the 
results of the tests""" - if any(self.checks.values()): - if not self.checks['first_scan_all_agents_alerts_detected']: - logger.critical("Not all agents generated vulnerabilities. Missing agents:" - f"{self.evidences['agents_not_detected_vulnerabilities']}") - - if not self.checks['first_scan_all_agents_alerts_vulnerabilities']: - logger.critical("Not all agents generated vulnerabilities. Missing agents:" - f"{self.evidences['agents_not_detected_vulnerabilities']}") - - if not self.checks['first_scan_all_agents_index_detected']: - logger.critical("Not all agents were scanned in the index. Missing agents:" - f"{self.evidences['agents_not_scanned_index']}") - - if not self.checks['first_scan_all_agents_index_vulnerabilities']: - logger.critical("Not all agents generated vulnerabilities in the index. Missing agents:" - f"{self.evidences['agents_not_detected_index_vulnerabilities']}") - - if not self.checks['index_state_consistent']: - logger.critical("Index state is not consistent with the alerts." - "Inconsistencies can be found in the logs directory") - - if not self.checks['syscollector_second_scan']: - logger.critical("Syscollector scan not started in the following agents:" - f"{self.evidences['agents_syscollector_second_scan_not_started']}") - - if not self.checks['vulnerabilities_equal_between_scans']: - logger.critical("The number of vulnerabilities alerts is not the same between scans." - "Incosistencies can be found in the logs directory") - - if not self.checks['vulnerabilities_equal_between_scans_indexer']: - logger.critical("The number of vulnerabilities in the index is not the same between scans." - "Incosistencies can be found in the logs directory") - - -@pytest.fixture(scope='module') -def results(): - """ - Handle the results of the tests - - Args: - host_manager: An instance of the HostManager class containing information about hosts. 
- """ - - initial_tests_results = SyscollectorScansTestsResults() - - yield initial_tests_results - - initial_tests_results.summary() +TIMEOUT_PER_AGENT_VULNERABILITY_SCAN = 60 def load_vulnerability_detector_configurations(host_manager): @@ -346,13 +231,14 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) @pytest.mark.filterwarnings('ignore::urllib3.exceptions.InsecureRequestWarning') class TestInitialScans(): - results = SyscollectorScansTestsResults() + # results = SyscollectorScansTestsResults() + results = {} @pytest.fixture(scope='class') def get_results(self): return self.results - def test_syscollector_first_scan(self, host_manager, setup_vulnerability_tests, get_results): + def test_syscollector_first_scan(self, request, host_manager, setup_vulnerability_tests, get_results): """ description: Validates the initiation of Syscollector scans across all agents in the environment. @@ -362,20 +248,17 @@ def test_syscollector_first_scan(self, host_manager, setup_vulnerability_tests, tier: 0 parameters: + - request: pytest request object - host_manager: type: fixture brief: Get the host manager of the environment - setup_vulnerability_tests: type: fixture brief: Setup the environment to proceed with the testing + - get_results: fixture to get the results of global class tests assertions: - - Verify that syscollector scan is started after agent start - - Verify that Vulnerability scan is performed for all the agent - - Verify that vulnerabilities are generated for each agent (Check vulnerabilities using Wazuh API) - - Verify that Agent's Vulnerabilities index is updated and is conssitent with the API results - - Verify that second scan is performed in expected timeframe - - Verify that no new vulnearbilities are detected since the first scan + - Verify that syscollector scan is started after agent start in all agents cases: None @@ -383,9 +266,17 @@ def test_syscollector_first_scan(self, host_manager, setup_vulnerability_tests, - 
syscollector - vulnerability_detector """ - test_fail_message = "" results = get_results - test_name = 'test_syscollector_first_scan' + test_name = request.node.name + + test_result = { + 'checks': { + 'all_successfull': True, + }, + 'evidences': { + 'agents_not_scanned_first_scan': [] + } + } logger.critical("Monitoring Syscollector First Scan") list_hosts = host_manager.get_group_hosts('agent') @@ -394,65 +285,145 @@ def test_syscollector_first_scan(self, host_manager, setup_vulnerability_tests, get_event_regex({'event': 'syscollector_scan_end'})], [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], list_hosts) - monitoring_results = monitoring_events_multihost(host_manager, monitoring_data) logger.critical(f"Value of monitoring results is: {monitoring_results}") for agent in monitoring_results: if monitoring_results[agent]['not_found']: - results.checks['syscollector_first_scan'] = False - results.evidences[test_name]['agents_not_scanned_first_scan'].append(agent) - test_fail_message += "Syscollector scan not started in the following agents:" \ - f"{results.evidences['agents_not_scanned_first_scan']}. " \ - 'Continuing with the test' + test_result['checks']['all_successfull'] = False + test_result['evidences']['agents_not_scanned_first_scan'].append(agent) + + results[test_name] = test_result + + if not test_result['checks']['all_successfull']: + pytest.fail("Some agents has not been scanned:" + f"{test_result['evidences']['agents_not_scanned_first_scan']}." + "Check logs for more information") + else: + logger.critical("All agents has been scanned") + + def test_syscollector_first_scan_alerts(self, request, host_manager, setup_vulnerability_tests, get_results): + """ + description: Validates that the Vulnerability Detector detects vulnerabilities within the environment in the + first scan. + + This test ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment. 
+ It is assumed that provided hosts will have at least one vulnerability. - if not results.checks['syscollector_first_scan']: - pytest.fail(test_fail_message) + tier: 0 + + parameters: + - request: pytest request object + - host_manager: + type: fixture + brief: Get the host manager of the environment + - setup_vulnerability_tests: + type: fixture + brief: Setup the environment to proceed with the testing + - get_results: fixture to get the results of global class tests + + assertions: + - Verify that all agents has been scanned + - Verify that all agents has generated vulnerabilities + + cases: None + + tags: + - syscollector + - vulnerability_detector + """ + test_result = { + 'checks': { + 'all_successfull': True, + }, + 'evidences': { + 'agents_not_detected_alerts': [], + } + } - def test_syscollector_first_scan_alerts(self, host_manager, setup_vulnerability_tests, get_results): results = get_results - test_name = 'test_syscollector_first_scan_alerts' + test_name = request.node.name - agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] + # Filter agents that has not been scanned + agents_to_check = results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan'] if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): pytest.skip("Syscollector scan not started in any agent. Skipping test") - # Wait until all agents has been scanned - logger.critical("Waiting until agent's VD scan is over") - # Replace with relevan event. For now timeout - time.sleep(TIMEOUT_PER_AGENT_VULNERABILITY_SCAN) + # Wait until all agents has been scanned. Timeout: 60 seconds per agent + for agent in host_manager.get_group_hosts('agent'): + logger.critical(f"Waiting until agent {agent} has been scanned." 
+ f"Waiting: {TIMEOUT_PER_AGENT_VULNERABILITY_SCAN}") + time.sleep(TIMEOUT_PER_AGENT_VULNERABILITY_SCAN) logger.critical("Check agent's vulnerabilities") alerts_first_scan = get_indexer_values(host_manager, greater_than_timestamp=setup_vulnerability_tests)['hits']['hits'] vuln_alerts_by_agent_first_scan = get_alerts_by_agent(alerts_first_scan, 'CVE.*? affects.*"?') - results.vulnerabilities_alerts_first_scan = vuln_alerts_by_agent_first_scan - # Check that it has been triggered vulnerability detector alerts logger.critical("Checking that all agents has been scanned") for agent in agents_to_check: - if agent not in list(vuln_alerts_by_agent_first_scan.keys()): + if agent not in list(vuln_alerts_by_agent_first_scan.keys()) or \ + len(vuln_alerts_by_agent_first_scan[agent]) == 0: logger.critical(f"Agent {agent} has not been scanned. Continuing with remaining agents") - results.checks['first_scan_all_agents_alerts_detected'] = False - results[test_name]['agents_not_scanned_first_scan'].append(agent) + test_result['checks']['all_successfull'] = False + test_result['evidences']['agents_not_detected_alerts'].append(agent) + + results[test_name] = test_result + + # Store full alert list in global results. It is needed for the next test + results['vulnerabilities_alerts_first_scan'] = vuln_alerts_by_agent_first_scan + + if not test_result['checks']['all_successfull']: + pytest.fail(f"Some agents has not been scanned: {test_result['evidences']['agents_not_scanned_first_scan']}." + "Check logs for more information") + else: + logger.critical("All agents has been scanned") - if len(vuln_alerts_by_agent_first_scan[agent]) == 0: - logger.critical(f"Agent {agent} has not generated vulnerabilities. 
Continuing with remaining agents") - results.checks['first_scan_all_agents_alerts_vulnerabilities'] = False - results[test_name]['agents_not_detected_vulnerabilities'].append(agent) + def test_syscollector_first_scan_index(self, request, host_manager, setup_vulnerability_tests, get_results): + """ + description: Validates that the Vulnerability Detector detects vulnerabilities within the environment in the + first scan in the index. + + This test ensures that the Vulnerability Detector accurately detects vulnerabilities within the environment in + the index. It is assumed that provided hosts will have at least one vulnerability. + + tier: 0 + + parameters: + - request: pytest request object + - host_manager: + type: fixture + brief: Get the host manager of the environment + - setup_vulnerability_tests: + type: fixture + brief: Setup the environment to proceed with the testing + - get_results: fixture to get the results of global class tests - if not results.checks['first_scan_all_agents_alerts_detected'] or not \ - results.checks['first_scan_all_agents_alerts_vulnerabilities']: - pytest.fail("Test failed. 
Check logs for more information") + assertions: + - Verify that all agents has been scanned + - Verify that all agents has generated vulnerabilities in the index + cases: None - def test_syscollector_first_scan_index(self, host_manager, setup_vulnerability_tests, get_results): + tags: + - syscollector + - vulnerability_detector + """ results = get_results - test_name = 'test_syscollector_first_scan_index' + test_name = request.node.name + test_result = { + 'checks': { + 'all_successfull': True, + }, + 'evidences': { + 'agents_not_detected_index_vulnerabilities': [], + } + } - agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] + # Filter agents that has not been scanned + agents_to_check = results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan'] if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): pytest.skip("Syscollector scan not started in any agent. Skipping test") @@ -462,60 +433,134 @@ def test_syscollector_first_scan_index(self, host_manager, setup_vulnerability_t greater_than_timestamp=setup_vulnerability_tests) index_vulnerabilities_by_agent_first_scan = get_indexed_vulnerabilities_by_agent(index_state_first_scan) - results.vulnerabilities_index_first_scan = index_vulnerabilities_by_agent_first_scan - logger.critical("Checking that all agents has been scanned and generated vulnerabilities in the index") for agent in agents_to_check: - if agent not in list(index_vulnerabilities_by_agent_first_scan.keys()): + + if agent not in list(index_vulnerabilities_by_agent_first_scan.keys()) or \ + len(index_vulnerabilities_by_agent_first_scan[agent]) == 0: logger.critical(f"Agent {agent} has not been scanned. 
Continuing with remaining agents") - results.checks['first_scan_all_agents_index_detected'] = False - results[test_name]['agents_not_scanned_index'].append(agent) + test_result['checks']['all_successfull'] = False + test_result['evidences']['agents_not_detected_index_vulnerabilities'].append(agent) + + results[test_name] = test_result - if len(index_vulnerabilities_by_agent_first_scan[agent]) == 0: - logger.critical(f"Agent {agent} has not generated vulnerabilities. Continuing with remaining agents") - results.checks['first_scan_all_agents_index_vulnerabilities'] = False - results[test_name]['agets_not_detected_index_vulnerabilities'].append(agent) + # Store full alert index list in global results. It is needed for the next test + results['vulnerabilities_index_first_scan'] = index_vulnerabilities_by_agent_first_scan - if not results.checks['first_scan_all_agents_index_detected'] or not \ - results.checks['first_scan_all_agents_index_vulnerabilities']: - pytest.fail("Test failed. Check logs for more information") + if not test_result['checks']['all_successfull']: + pytest.fail("Some agents has not been scanned and updated states index:" + f"{test_result['evidences']['agents_not_detected_alerts']}.") + else: + logger.critical("All agents has been scanned and updated states index") - def tests_syscollector_vulnerabilities_index_alerts_consistency(self, host_manager, + def tests_syscollector_vulnerabilities_index_alerts_consistency(self, request, setup_vulnerability_tests, get_results): - results = get_results - test_name = 'tests_syscollector_vulnerabilities_index_alerts_consistency' + """ + description: Ensure the consistency of the agent's vulnerabilities between the index and the alerts. - agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] - if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): - pytest.skip("Syscollector scan not started in any agent. 
Skipping test") + This test ensure that alerts in the index are consistent with the alerts in the alerts index after the first + scan. + + tier: 0 + + parameters: + - request: pytest request object + - setup_vulnerability_tests: + type: fixture + brief: Setup the environment to proceed with the testing + - get_results: fixture to get the results of global class tests + + assertions: + - Verify that the index is consistent with the alerts + + cases: None + + tags: + - syscollector + - vulnerability_detector + """ + results = get_results + test_name = request.node.name + test_result = { + 'checks': { + 'all_successfull': True, + }, + 'evidences': { + 'alerts_not_in_states': [], + 'states_not_in_alerts': [], + 'alerts_first_scan': results['vulnerabilities_alerts_first_scan'], + 'states_first_scan': results['vulnerabilities_index_first_scan'] + } + } # Check that the index is consistent with the alerts logging.critical("Checking index state consistency") inconsistencies_between_alerts_indexer = \ - check_vuln_state_consistency(results.vulnerabilities_alerts_first_scan, - results.vulnerabilities_index_first_scan) + check_vuln_state_consistency(results['vulnerabilities_alerts_first_scan'], + results['vulnerabilities_index_first_scan']) - results.evidences[test_name]['alerts_not_in_states'] = \ + test_result['evidences']['alerts_not_in_states'] = \ inconsistencies_between_alerts_indexer['alerts_not_in_states'] - results.evidences[test_name]['states_not_in_alerts'] = \ + + test_result['evidences']['states_not_in_alerts'] = \ inconsistencies_between_alerts_indexer['states_not_in_alerts'] - if len(results.evidences[test_name]['alerts_not_in_states']) > 0 or \ - len(results.evidences[test_name]['states_not_in_alerts']) > 0: + if len(test_result['evidences']['alerts_not_in_states']) > 0 or \ + len(test_result['evidences']['states_not_in_alerts']) > 0: logger.critical("Index state is not consistent with the alerts") - results.checks['index_state_consistent'] = False + 
test_result['checks']['all_successfull'] = False + + results[test_name] = test_result + + if not test_result['checks']['all_successfull']: + logger.critical("Index state is not consistent with the alerts") + logger.critical(f"Alerts not in states: {test_result['evidences']['alerts_not_in_states']}") + logger.critical(f"States not in alerts: {test_result['evidences']['states_not_in_alerts']}") + pytest.fail() + else: + logger.critical("Index state is consistent with the alerts") + + def test_syscollector_second_scan(self, request, host_manager, setup_vulnerability_tests, get_results): + """ + description: Validates the initiation of the second Syscollector scans across all agents in the environment. - results.evidences[test_name]['alerts_first_scan'] = results.vulnerabilities_alerts_first_scan - results.evidences[test_name]['states_first_scan'] = results.vulnerabilities_index_first_scan + This test ensures that Syscollector second scans are started in all agents in the environment. - if not results.checks['index_state_consistent']: - pytest.fail("Test failed. 
Check logs for more information") + tier: 0 + + parameters: + - request: pytest request object + - host_manager: + type: fixture + brief: Get the host manager of the environment + - setup_vulnerability_tests: + type: fixture + brief: Setup the environment to proceed with the testing + - get_results: fixture to get the results of global class tests + + assertions: + - Verify that all agents has been scanned + + cases: None + + tags: + - syscollector + - vulnerability_detector + """ - def test_syscollector_second_scan(self, host_manager, setup_vulnerability_tests, get_results): results = get_results - test_name = 'test_syscollector_second_scan' + test_name = request.node.name + test_result = { + 'checks': { + 'all_successfull': True, + }, + 'evidences': { + 'agents_syscollector_second_scan_not_started': [] + } + } - agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] + # Filter agents that has not been scanned + agents_to_check = results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan'] if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): pytest.skip("Syscollector scan not started in any agent. Skipping test") @@ -528,103 +573,194 @@ def test_syscollector_second_scan(self, host_manager, setup_vulnerability_tests, monitoring_results = monitoring_events_multihost(host_manager, monitoring_data) logger.critical("Checking that all agents has been scanned") - for agent in monitoring_results: if monitoring_results[agent]['not_found']: - results.checks['syscollector_second_scan'] = False - results.evidences[test_name]['agents_syscollector_second_scan_not_started'].append(agent) + test_result['checks']['all_successfull'] = False + test_result['evidences']['agents_syscollector_second_scan_not_started'].append(agent) logging.critical(f"Syscollector scan not started in the following agents:" - f"{results.evidences['agents_syscollector_second_scan_not_started']}." 
+ f"{test_result['evidences']['agents_syscollector_second_scan_not_started']}." 'Continuing with the test') - if not results.checks['syscollector_second_scan']: - logger.critical("Syscollector scan not started in the following agents:" - f"{results.evidences['agents_syscollector_second_scan_not_started']}.") + results[test_name] = test_result - pytest.fail("Test failed. Check logs for more information") + if not test_result['checks']['all_successfull']: + pytest.fail("Syscollector scan not started in the following agents:" + f"{test_name['evidences']['agents_syscollector_second_scan_not_started']}.") + else: + logger.critical("Syscollector scan started in all agents") - def tests_syscollector_first_second_scan_consistency_alerts(self, host_manager, setup_vulnerability_tests, + def tests_syscollector_first_second_scan_consistency_alerts(self, request, host_manager, setup_vulnerability_tests, get_results): + """ + description: Ensure the consistency of the agent's vulnerabilities between the first and second scans. + + This test ensure that alerts in the first scan are consistent with the alerts in the second scan. 
+ + tier: 0 + + parameters: + - request: pytest request object + - host_manager: + type: fixture + brief: Get the host manager of the environment + - setup_vulnerability_tests: + type: fixture + brief: Setup the environment to proceed with the testing + - get_results: fixture to get the results of global class tests + + assertions: + - Verify that the number of vulnerabilities is the same between scans + + cases: None + + tags: + - syscollector + - vulnerability_detector + """ + + test_name = request.node.name results = get_results - test_name = 'tests_syscollector_first_second_scan_consistency_alerts' + request = request + test_result = { + 'checks': { + 'all_successfull': True, + }, + 'evidences': { + 'vulnerabilities_not_equal_between_scans_alerts': [], + 'agents_different_between_scans': [], + 'vulnerabilities_alerts_first_scan': results['vulnerabilities_alerts_first_scan'], + 'vulnerabilities_alerts_second_scan': [] + } + } + + # Filter agents that has not been scanned + agents_to_check = results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan'] - agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): pytest.skip("Syscollector scan not started in any agent. Skipping test") logger.critical("Waiting until agent's VD scan is over") - time.sleep(60) - logger.critical("Checking vulnerabilities in the second scan") + # Only wait for the first agent. Any possible discrepancy will be detected in this interval + time.sleep(TIMEOUT_PER_AGENT_VULNERABILITY_SCAN) + logger.critical("Checking vulnerabilities in the second scan") alerts_second_scan = get_indexer_values(host_manager, greater_than_timestamp=setup_vulnerability_tests)['hits']['hits'] vuln_alerts_by_agent_second_scan = get_alerts_by_agent(alerts_second_scan, 'CVE.*? 
affects.*"?') - results.vulnerabilities_alerts_second_scan = vuln_alerts_by_agent_second_scan + + # Store full alert list in global results. It is needed for the next test + results['vulnerabilities_alerts_second_scan'] = vuln_alerts_by_agent_second_scan + test_result['evidences']['vulnerabilities_alerts_second_scan'] = vuln_alerts_by_agent_second_scan alert_present_in_first_scan_not_in_second_scan = [] - alert_present_in_second_scan_not_in_second_scan = [] + alert_present_in_second_scan_not_in_first_scan = [] - if len(vuln_alerts_by_agent_second_scan) != len(results.vulnerabilities_alerts_first_scan): - results.checks['vulnerabilities_equal_between_scans'] = False - logger.critical(f"First scan: {len(results.vulnerabilities_alerts_first_scan)}") + if len(vuln_alerts_by_agent_second_scan.keys()) != len(results['vulnerabilities_alerts_first_scan'].keys()): + test_result['checks']['all_successfull'] = False + logger.critical(f"First scan: {len(results['vulnerabilities_alerts_first_scan'])}") logger.critical(f"Second scan: {len(vuln_alerts_by_agent_second_scan)}") logger.critical("Checking that all agents has been scanned") # Check if the number of agents for each scan is the same - if list(results.vulnerabilities_alerts_first_scan.keys()) != list(vuln_alerts_by_agent_second_scan.keys()): - results.checks['vulnerabilities_equal_between_scans'] = False + if list(results['vulnerabilities_alerts_first_scan'].keys()) != list(vuln_alerts_by_agent_second_scan.keys()): + test_result['checks']['all_successfull'] = False logging.critical(f"Agents with vulnerabilities changed between scans: " - f"First scan: {list(results.vulnerabilities_alerts_first_scan.keys())}" + f"First scan: {list(results['vulnerabilities_alerts_first_scan'].keys())}" f"Second scan: {list(vuln_alerts_by_agent_second_scan.keys())}") - results[test_name]['evidences']['agents_different_between_scans'] = \ - list(set(list(results.vulnerabilities_alerts_first_scan.keys())) ^ 
set(list(vuln_alerts_by_agent_second_scan.keys()))) + + test_result['evidences']['agents_different_between_scans'] = \ + list(set(list(results['vulnerabilities_alerts_first_scan'].keys())) ^ + set(list(results['vulnerabilities_alerts_second_scan'].keys()))) logger.critical("Checking that all agents has been scanned") # Check if the number of vulnerabilities for each agent is the same for agent in agents_to_check: for alert in list(vuln_alerts_by_agent_second_scan[agent][0]): - alert_present_in_second_scan_not_in_second_scan.append(alert) + alert_present_in_second_scan_not_in_first_scan.append(alert) - for alert in list(results.vulnerabilities_alerts_first_scan[agent][0]): + for alert in list(results['vulnerabilities_alerts_first_scan'][agent][0]): if alert in alert_present_in_first_scan_not_in_second_scan: alert_present_in_first_scan_not_in_second_scan.remove(alert) logger.critical("Checking that all agents has been scanned") - if alert_present_in_first_scan_not_in_second_scan or alert_present_in_second_scan_not_in_second_scan: - results.checks['vulnerabilities_equal_between_scans'] = False - results.evidences[test_name]['vulnerabilities_not_equal_between_scans_alerts'] = { + if alert_present_in_first_scan_not_in_second_scan or alert_present_in_second_scan_not_in_first_scan: + test_result['checks']['all_successfull'] = False + test_result['evidences']['vulnerabilities_not_equal_between_scans_alerts'] = { 'alert_present_in_first_scan_not_in_second_scan': alert_present_in_first_scan_not_in_second_scan, - 'alert_present_in_second_scan_not_in_second_scan': alert_present_in_second_scan_not_in_second_scan + 'alert_present_in_second_scan_not_in_first_scan': alert_present_in_second_scan_not_in_first_scan } - if not results.checks['vulnerabilities_equal_between_scans']: - pytest.fail("Test failed. 
Check logs for more information") + results[test_name] = test_result - def tests_syscollector_first_second_scan_consistency_index(self, host_manager, setup_vulnerability_tests, + if not test_result['checks']['all_successfull']: + pytest.fail("Inconsistencies found between first and second scan." + "Check evidences for more information") + else: + logger.critical("The number of vulnerabilities is the same between scans") + + def tests_syscollector_first_second_scan_consistency_index(self, request, host_manager, setup_vulnerability_tests, get_results): - results = get_results - test_name = 'tests_syscollector_first_second_scan_consistency_index' + """ + description: Ensure the consistency of the agent's vulnerabilities between the first and second scans in index. - agents_to_check = results.evidences['test_syscollector_first_scan_alerts']['agents_not_scanned_first_scan'] - if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): - pytest.skip("Syscollector scan not started in any agent. Skipping test") + This test ensure that alerts in the first scan are consistent with the alerts in the second scan in the index. 
+ + tier: 0 + + parameters: + - request: pytest request object + - host_manager: + type: fixture + brief: Get the host manager of the environment + - setup_vulnerability_tests: + type: fixture + brief: Setup the environment to proceed with the testing + - get_results: fixture to get the results of global class tests + + assertions: + - Verify that the number of vulnerabilities is the same between scans + + cases: None + + tags: + - syscollector + - vulnerability_detector + """ + + results = get_results + test_name = request.node.name + test_result = { + 'checks': { + 'all_successfull': True, + }, + 'evidences': { + 'vulnerabilities_not_equal_between_scans_indexer': [], + 'vulnerabilities_index_first_scan': results['vulnerabilities_index_first_scan'], + 'vulnerabilities_index_second_scan': [] + } + } logger.critical("Checking vulnerabilities in the second scan") index_state_second_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', greater_than_timestamp=setup_vulnerability_tests) index_vulnerabilities_by_agent_second_scan = get_indexed_vulnerabilities_by_agent(index_state_second_scan) - results.vulnerabilities_index_second_scan = index_vulnerabilities_by_agent_second_scan + results['vulnerabilities_index_second_scan'] = index_vulnerabilities_by_agent_second_scan + test_result['evidences']['vulnerabilities_index_second_scan'] = index_vulnerabilities_by_agent_second_scan + + differences = list(set(results['vulnerabilities_index_first_scan']).symmetric_difference( + set(results['vulnerabilities_index_second_scan']))) - differences = list(set(results.vulnerabilities_index_first_scan).symmetric_difference( - set(results.vulnerabilities_index_second_scan))) + results[test_name] = test_result - if results.vulnerabilities_index_first_scan != results.vulnerabilities_index_second_scan: - results.checks['vulnerabilities_equal_between_scans_indexer'] = False + if results['vulnerabilities_index_first_scan'] != 
results['vulnerabilities_index_second_scan']: + test_result['checks']['all_successfull'] = False results[test_name]['evidences']['vulnerabilities_not_equal_between_scans_indexer'] = differences + pytest.fail('The number of vulnerabilities is not the same between scans') + else: + logger.critical("The number of vulnerabilities is the same between scans") - logger.critical("The number of vulnerabilities is not the same between scans") # ------------------------- @@ -643,10 +779,9 @@ def tests_syscollector_first_second_scan_consistency_index(self, host_manager, s for case in cases ] -dependencies = [None if 'depends' not in case else pytest.mark.depends(name=case['id'], - depends=case['depends']) for case in cases] list_ids = [case['id'] for case in cases] + class TestScanSyscollectorCases(): results = {} @@ -654,12 +789,17 @@ class TestScanSyscollectorCases(): def get_results(self): return self.results - - @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) #def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager): - def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager, get_results): + @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) + def test_vulnerability_detector_scans_cases(self, preconditions, body, teardown, setup, + host_manager, get_results): + setup_results = setup results = get_results + results['setup'] = setup_results + + import pdb; pdb.set_trace() + hosts_to_ignore = [] for host in setup_results.keys(): @@ -680,7 +820,7 @@ def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, precondit success_for_all_agents = True for host in result.keys(): - if result[host]['checks']['all_successfu1ll'] is False: + if result[host]['checks']['all_successfull'] is False: success_for_all_agents = False logger.critical(f"Test failed for host {host}. 
Check logs for more information") logger.critical(f"Evidences: {result[host]['evidences']}") From 7551cc2543c52fa22ceb22b76c885447cc97ace6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 11:55:59 +0000 Subject: [PATCH 115/174] feat: include remove vlc software playbook --- .../tools/playbooks/remove_package_win.yaml | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml diff --git a/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml b/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml new file mode 100644 index 0000000000..1bd2cf3146 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml @@ -0,0 +1,35 @@ +- name: Uninstall VLC + hosts: agent2 + become_method: runas + tasks: + # - name: Run VLC uninstall + # ansible.windows.win_command: + # cmd: "uninstall.exe /S /c -wait /norestart & timeout 10" + # chdir: '{{uninstall_script_path}}' + # become: yes + # become_user: Administrator + + + + # - name: Run VLC uninstall + # ansible.windows.win_powershell: + # script: "Start-Process '{{uninstall_script_path}}\\uninstall.exe' -ArgumentList '/S /c /norestart -wait'" + # become: yes + # become_user: Administrator + + - name: Run VLC uninstall + ansible.windows.win_powershell: + script: | + $VLCver = Get-ChildItem -Path HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall, HKLM:\SOFTWARE\Wow6432Node\Microsoft\Windows\CurrentVersion\Uninstall | + Get-ItemProperty | + Where-Object {$_.DisplayName -match 'VLC'} | + Select-Object -Property DisplayName, UninstallString, DisplayVersion + + ForEach ($ver in $VLCver) { + If ($ver.UninstallString) { + $uninst = $ver.UninstallString + & cmd /c $uninst /norestart /S -wait + } + } + Start-Process '{{uninstall_script_path}}\\uninstall.exe' -ArgumentList '/S /c /norestart -wait /L-1033 /S' + From 
ebd738a1ed649f05fcfcc1d06fc79e0c0bcf8594 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 13:55:19 +0000 Subject: [PATCH 116/174] docs: improve configuraiton docstrings --- .../wazuh_testing/end_to_end/configuration.py | 93 +++++++++++++++++-- 1 file changed, 87 insertions(+), 6 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index 88829f3101..664117e49c 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -1,5 +1,5 @@ """ -Configurations handler for remote hosts. +Module for change configurations of remote hosts. ---------------------------------------- This module provides functions for configuring and managing remote host @@ -16,9 +16,11 @@ Created by Wazuh, Inc. . This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ -from multiprocessing.pool import ThreadPool -from typing import Dict import xml.dom.minidom +import logging + +from multiprocessing.pool import ThreadPool +from typing import Dict, List from wazuh_testing.end_to_end import configuration_filepath_os from wazuh_testing.tools.configuration import set_section_wazuh_conf @@ -34,6 +36,12 @@ def backup_configurations(host_manager: HostManager) -> Dict[str, str]: Returns: dict: A dictionary mapping host names to their configurations. + + Example of returned dictionary: + { + 'manager': '...', + 'agent1': ... 
+ } """ backup_configurations = {} for host in host_manager.get_group_hosts('all'): @@ -46,13 +54,19 @@ def backup_configurations(host_manager: HostManager) -> Dict[str, str]: return backup_configurations -def restore_configuration(host_manager: HostManager, configuration: Dict[str, str]) -> None: +def restore_configuration(host_manager: HostManager, configuration: Dict[str, List]) -> None: """ Restore configurations for all hosts in the specified host manager. Args: host_manager: An instance of the HostManager class containing information about hosts. configuration: A dictionary mapping host names to their configurations. + + Example of configuration dictionary: + { + 'manager': '...', + 'agent1': ... + } """ for host in host_manager.get_group_hosts('all'): @@ -68,9 +82,45 @@ def configure_host(host: str, host_configuration: Dict[str, Dict], host_manager: Args: host: The name of the host to be configured. - host_configuration: Role of the configured host for the host. + host_configuration: Role of the configured host for the host. Check below for example. host_manager: An instance of the HostManager class containing information about hosts. + + Note: The host_configuration dictionary must contain a list of sections and elements to be configured. The sections + not included in the dictionary will not be modified maintaining the current configuration. 
+ + + Example of host_configuration dictionary: + { + "manager1":[ + { + "sections":[ + { + "section":"vulnerability-detection", + "elements":[ + { + "enabled":{ + "value":"yes" + } + }, + { + "index-status":{ + "value":"yes" + } + }, + { + "feed-update-interval":{ + "value":"2h" + } + } + ] + }, + ], + "metadata":{} + } + ], + } """ + logging.info(f"Configuring host {host_configuration}") host_os = host_manager.get_host_variables(host)['os_name'] config_file_path = configuration_filepath_os[host_os] @@ -101,13 +151,44 @@ def configure_host(host: str, host_configuration: Dict[str, Dict], host_manager: host_manager.modify_file_content(str(host), config_file_path, final_configuration) -def configure_environment(host_manager: HostManager, configurations: Dict[str, str]) -> None: +def configure_environment(host_manager: HostManager, configurations: Dict[str, List]) -> None: """ Configure the environment for all hosts in the specified host manager. Args: host_manager: An instance of the HostManager class containing information about hosts. configurations: A dictionary mapping host roles to their configuration details. 
+ + Example of host_configurations dictionary: + { + "manager1":[ + { + "sections":[ + { + "section":"vulnerability-detection", + "elements":[ + { + "enabled":{ + "value":"yes" + } + }, + { + "index-status":{ + "value":"yes" + } + }, + { + "feed-update-interval":{ + "value":"2h" + } + } + ] + }, + ], + "metadata":{} + } + ], + } """ configure_environment_parallel_map = [(host, configurations) for host in host_manager.get_group_hosts('all')] From 120de9d898a3ad94accdd9c59f0f60db037b4767 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 13:58:32 +0000 Subject: [PATCH 117/174] docs: improve logs and indexer docstrings and typing --- deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py | 1 - deps/wazuh_testing/wazuh_testing/end_to_end/logs.py | 5 ++++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index d06f44844a..725a577f0d 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -7,7 +7,6 @@ Functions: - get_indexer_values: Retrieves values from the Indexer API. - Copyright (C) 2015, Wazuh Inc. Created by Wazuh, Inc. . This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index f0447627cb..6387efa55e 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -7,12 +7,15 @@ Functions: - truncate_remote_host_group_files: Truncate the specified files in all the host of a group + - get_hosts_logs: Get the logs from the specified host group Copyright (C) 2015, Wazuh Inc. Created by Wazuh, Inc. . 
This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ +from typing import Dict + from wazuh_testing import ALERTS_JSON_PATH from wazuh_testing.end_to_end import logs_filepath_os from wazuh_testing.tools.system import HostManager @@ -41,7 +44,7 @@ def truncate_remote_host_group_files(host_manager: HostManager, host_group: str, host_manager.truncate_file(host, log_file_path) -def get_hosts_logs(host_manager: HostManager, host_group: str = 'all') -> dict: +def get_hosts_logs(host_manager: HostManager, host_group: str = 'all') -> Dict[str, str]: """ Get the logs from the specified host group. From bb11bb1235564a163df42297f02fe714280671df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:01:52 +0000 Subject: [PATCH 118/174] feat: include logging to configuration and logs modules --- .../wazuh_testing/end_to_end/configuration.py | 13 ++++++++++--- .../wazuh_testing/end_to_end/indexer_api.py | 3 ++- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py index 664117e49c..324febaacb 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/configuration.py @@ -43,6 +43,7 @@ def backup_configurations(host_manager: HostManager) -> Dict[str, str]: 'agent1': ... 
} """ + logging.info("Backing up configurations") backup_configurations = {} for host in host_manager.get_group_hosts('all'): host_os_name = host_manager.get_host_variables(host)['os_name'] @@ -50,7 +51,7 @@ def backup_configurations(host_manager: HostManager) -> Dict[str, str]: backup_configurations[host] = host_manager.get_file_content(str(host), configuration_filepath) - + logging.info("Configurations backed up") return backup_configurations @@ -68,12 +69,13 @@ def restore_configuration(host_manager: HostManager, configuration: Dict[str, Li 'agent1': ... } """ - + logging.info("Restoring configurations") for host in host_manager.get_group_hosts('all'): host_os_name = host_manager.get_host_variables(host)['os_name'] configuration_filepath = configuration_filepath_os[host_os_name] host_manager.modify_file_content(host, configuration_filepath, configuration[host]) + logging.info("Configurations restored") def configure_host(host: str, host_configuration: Dict[str, Dict], host_manager: HostManager) -> None: @@ -120,7 +122,7 @@ def configure_host(host: str, host_configuration: Dict[str, Dict], host_manager: ], } """ - logging.info(f"Configuring host {host_configuration}") + logging.info(f"Configuring host {host}") host_os = host_manager.get_host_variables(host)['os_name'] config_file_path = configuration_filepath_os[host_os] @@ -150,6 +152,8 @@ def configure_host(host: str, host_configuration: Dict[str, Dict], host_manager: host_manager.modify_file_content(str(host), config_file_path, final_configuration) + logging.info(f"Host {host} configured") + def configure_environment(host_manager: HostManager, configurations: Dict[str, List]) -> None: """ @@ -190,8 +194,11 @@ def configure_environment(host_manager: HostManager, configurations: Dict[str, L ], } """ + logging.info("Configuring environment") configure_environment_parallel_map = [(host, configurations) for host in host_manager.get_group_hosts('all')] with ThreadPool() as pool: pool.starmap(configure_host, [(host, 
config, host_manager) for host, config in configure_environment_parallel_map]) + + logging.info("Environment configured") diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 725a577f0d..441bbacc38 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -36,7 +36,8 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' Returns: Dict: A dictionary containing the values retrieved from the Indexer API. """ - logging.debug(f"Getting values from the Indexer API for index {index}.") + logging.info(f"Getting values from the Indexer API for index {index}") + url = f"https://{host_manager.get_master_ip()}:9200/{index}/_search" headers = { 'Content-Type': 'application/json', From 8aabf2bd5ccfb64ad4208e5b58b1f6e224f0da62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:25:34 +0000 Subject: [PATCH 119/174] refac: removing duplicate function --- .../wazuh_testing/end_to_end/monitoring.py | 94 ++++++++++++------- 1 file changed, 61 insertions(+), 33 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index f1440aa296..e923c557e4 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -8,7 +8,6 @@ Functions: - monitoring_events_multihost: Monitor events on multiple hosts concurrently. - generate_monitoring_logs: Generate monitoring data for logs on all agent hosts. - - generate_monitoring_logs_manager: Generate monitoring data for logs on a specific manager host. Copyright (C) 2015, Wazuh Inc. 
@@ -35,6 +34,32 @@ def monitoring_events_multihost(host_manager: HostManager, monitoring_data: Dict host_manager: An instance of the HostManager class containing information about hosts. monitoring_data: A dictionary containing monitoring data for each host. ignore_error: If True, ignore errors and continue monitoring. + + Returns: + dict: A dictionary containing the monitoring results. + + Example of monitoring_data: + { + "manager1":[ + { + "regex":"INFO: Action for 'vulnerability_feed_manager' finished", + "file":"/var/ossec/logs/ossec.log", + "timeout":1000, + "n_iterations":1, + "greater_than_timestamp":"" + } + ] + } + Example of monitoring_result: + { + "manager1":{ + "not_found":[ + ], + "found":[ + "INFO: Action for 'vulnerability_feed_manager' finished" + ] + } + } """ def monitoring_event(host_manager: HostManager, host: str, monitoring_elements: List[Dict], scan_interval: int = 20, ignore_error: bool = False) -> Dict: @@ -115,6 +140,8 @@ def filter_events_by_timestamp(match_events: List) -> List: return monitoring_result + logging.info(f"Monitoring the following elements: {monitoring_data}") + with ThreadPoolExecutor() as executor: futures = [] for host, data in monitoring_data.items(): @@ -128,13 +155,15 @@ def filter_events_by_timestamp(match_events: List) -> List: except Exception as e: logging.error(f"An error occurred: {e}") + logging.info(f"Monitoring results: {results}") + return results def generate_monitoring_logs(host_manager: HostManager, regex_list: List[str], timeout_list: List[str], hosts: List[str], n_iterations=1, greater_than_timestamp: str = '') -> Dict: """ - Generate monitoring data for logs on all agent hosts. + Generate monitoring data for logs on all provided hosts. Args: host_manager: An instance of the HostManager class containing information about hosts. @@ -146,45 +175,44 @@ def generate_monitoring_logs(host_manager: HostManager, regex_list: List[str], t Returns: dict: Monitoring data for logs on all agent hosts. 
+ + Example of monitoring_data: + { + "agent1":[ + { + "regex":"INFO: Action for 'vulnerability_feed_manager' finished", + "file":"/var/ossec/logs/ossec.log", + "timeout":1000, + "n_iterations":1, + "greater_than_timestamp":"" + } + ] + } + """ monitoring_data = {} - for agent in hosts: - monitoring_data[agent] = [] + if len(regex_list) == 1: + logging.info("Using the same regex for all hosts") + regex_list = regex_list * len(hosts) + elif len(regex_list) != len(hosts): + raise ValueError("The number of regexes must be equal to the number of hosts") + + if len(timeout_list) == 1: + logging.info("Using the same timeout for all hosts") + timeout_list = timeout_list * len(hosts) + elif len(timeout_list) != len(hosts): + raise ValueError("The number of timeouts must be equal to the number of hosts") + + for host in hosts: + monitoring_data[host] = [] for index, regex_index in enumerate(regex_list): - os_name = host_manager.get_host_variables(agent)['os_name'] - monitoring_data[agent].append({ + os_name = host_manager.get_host_variables(host)['os_name'] + monitoring_data[host].append({ 'regex': regex_index, 'file': logs_filepath_os[os_name], 'timeout': timeout_list[index], 'n_iterations': n_iterations, 'greater_than_timestamp': greater_than_timestamp }) - return monitoring_data - - -def generate_monitoring_logs_manager(host_manager: HostManager, manager: str, regex: str, timeout: int, - n_iterations: int = 1, greater_than_timestamp: str = '') -> Dict: - """ - Generate monitoring data for logs on a specific manager host. - - Args: - host_manager: An instance of the HostManager class containing information about hosts. - manager: The target manager host. - regex: The regular expression for monitoring. - timeout: The timeout value for monitoring. - greater_than_timestamp: The timestamp to filter the results. Defaults to None. - - Returns: - dict: Monitoring data for logs on the specified manager host. 
- """ - monitoring_data = {} - os_name = host_manager.get_host_variables(manager)['os_name'] - monitoring_data[manager] = [{ - 'regex': regex, - 'file': logs_filepath_os[os_name], - 'timeout': timeout, - 'n_iterations': n_iterations, - 'greater_than_timestamp': greater_than_timestamp - }] return monitoring_data From 7f352af7d72c1ca92ffc63e520973c1d255f36e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:31:15 +0000 Subject: [PATCH 120/174] feat: include logging to regex module --- deps/wazuh_testing/wazuh_testing/end_to_end/regex.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index 2ac2e5d1aa..dfc34533c2 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -2,7 +2,8 @@ Regex Patterns for Syscollector Events. --------------------------------------- -This module defines regular expression patterns for various events related to Syscollector. The patterns are used to extract information from log messages. +This module defines regular expression patterns for various events related to Syscollector. +The patterns are used to extract information from log messages. Constants: REGEX_PATTERNS (dict): A dictionary mapping event names to their respective regex patterns and parameters. @@ -15,9 +16,8 @@ Created by Wazuh, Inc. . This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ - - from typing import Dict +import logging REGEX_PATTERNS = { @@ -61,6 +61,9 @@ def get_event_regex(event: Dict) -> str: Raises: Exception: If required parameters are missing. 
""" + + logging.info(f"Getting regex for event {event['event']}") + expected_event = REGEX_PATTERNS.get(event['event']) if expected_event is None: @@ -70,7 +73,6 @@ def get_event_regex(event: Dict) -> str: if 'parameters' in expected_event and 'parameters' not in event: raise Exception(f"Not provided enough data to create regex. Missing {expected_event['parameters']}") - elif 'parameters' in event: for parameter in expected_event['parameters']: expected_regex = expected_regex.replace(parameter, event['parameters'].get(parameter, '')) From 001e44d325c48f9ac512f9ff1ed6ec3faafb5773 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:32:07 +0000 Subject: [PATCH 121/174] docs: improve regex module docstrings --- deps/wazuh_testing/wazuh_testing/end_to_end/regex.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index dfc34533c2..b5126c4a2b 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -60,8 +60,18 @@ def get_event_regex(event: Dict) -> str: Raises: Exception: If required parameters are missing. 
- """ + Example of event: + { + 'event': 'syscollector_install_package_alert_yum', + 'parameters': { + 'HOST_NAME': 'agent1', + 'PACKAGE_NAME': 'openssh-server', + 'PACKAGE_VERSION': '8.0p1-4', + 'ARCHITECTURE': 'x86_64' + } + } + """ logging.info(f"Getting regex for event {event['event']}") expected_event = REGEX_PATTERNS.get(event['event']) From 99cf24d12ba2d5a46f17cb65a7fac819913bb0e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:35:04 +0000 Subject: [PATCH 122/174] style: format manager and agent configurations --- .../configurations/agent.yaml | 8 +++--- .../configurations/manager.yaml | 25 ++++--------------- 2 files changed, 8 insertions(+), 25 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml index 32edc6424e..6bcc2eb290 100644 --- a/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml @@ -3,22 +3,20 @@ elements: - enabled: value: 'no' - - section: rootcheck elements: - disabled: value: 'yes' - - section: syscheck elements: - disabled: value: 'no' - - section: wodle attributes: - - name: 'syscollector' + - name: syscollector elements: - disabled: value: 'no' - interval: - value: '1m' + value: 1m + diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml index 573458d967..e62289be13 100644 --- a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml @@ -6,8 +6,7 @@ - index-status: value: 'yes' - feed-update-interval: - value: '2h' - + value: 2h - section: indexer elements: - enabled: @@ -15,7 +14,7 @@ - hosts: elements: - host: - value: "https://INDEXER_SERVER:9200" + value: 
'https://INDEXER_SERVER:9200' - username: value: INDEXER_USERNAME - password: @@ -30,54 +29,40 @@ value: FILEBEAT_CERTIFICATE - key: value: FILEBEAT_KEY - - section: sca elements: - enabled: value: 'no' - - section: rootcheck elements: - disabled: value: 'yes' - - section: syscheck elements: - disabled: value: 'yes' - - section: wodle attributes: - - name: 'syscollector' + - name: syscollector elements: - disabled: value: 'no' - - - - - - - - - section: sca elements: - enabled: value: 'no' - - section: rootcheck elements: - disabled: value: 'yes' - - section: syscheck elements: - disabled: value: 'yes' - - section: wodle attributes: - - name: 'syscollector' + - name: syscollector elements: - disabled: value: 'no' + From e0e24b75702d56aa9222803ae868a4cd87936ec7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:36:37 +0000 Subject: [PATCH 123/174] style: remove extra whitespaces --- tests/end_to_end/conftest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/end_to_end/conftest.py b/tests/end_to_end/conftest.py index 7e6a91751e..d9dd577694 100644 --- a/tests/end_to_end/conftest.py +++ b/tests/end_to_end/conftest.py @@ -346,5 +346,3 @@ def pytest_addoption(parser): type=str, help='Ansible roles path.', ) - - From c12db5a3530def6f6ad048c71504b86c9c59e642 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:38:06 +0000 Subject: [PATCH 124/174] fix: packages lists JSON format --- .../vuln_packages.json | 792 +++++++++++------- 1 file changed, 493 insertions(+), 299 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json index fbdabd88a2..95f8ceeca6 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json +++ 
b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json @@ -1,324 +1,518 @@ { - "nmap-6.46": { - "package_name": "nmap", - "package_version": "6.46-1", - "CVE": ["CVE-2018-15173"], - "urls": { - "centos": { - "amd64": "https://nmap.org/dist/nmap-6.46-1.x86_64.rpm" - } - }, - "uninstall_name": "nmap*" + "nmap-6.46": { + "package_name": "nmap", + "package_version": "6.46-1", + "CVE": [ + "CVE-2018-15173" + ], + "urls": { + "centos": { + "amd64": "https://nmap.org/dist/nmap-6.46-1.x86_64.rpm" + } }, - "grafana-8.5.5": { - "package_name": "grafana", - "package_version": "8.5.5", - "CVE": ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] - "urls": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm" - }, - "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb" - } - }, - "uninstall_name": "grafana*" + "uninstall_name": "nmap*" + }, + "grafana-8.5.5": { + "package_name": "grafana", + "package_version": "8.5.5", + "CVE": [ + "CVE-2023-2183", + "CVE-2023-1410", + "CVE-2023-0594", + "CVE-2023-0507", + "CVE-2022-39324", + "CVE-2022-39307", + "CVE-2022-39306", + "CVE-2022-39229", + "CVE-2022-39201", + "CVE-2022-36062", + "CVE-2022-35957", + "CVE-2022-31130", + "CVE-2022-31123", + "CVE-2022-31107", + "CVE-2022-31097", + "CVE-2022-23552", + "CVE-2022-23498" + ], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm", + "arm64v8": 
"https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb" + } }, - "grafana-8.5.6": { - "package_name": "grafana", - "package_version": "8.5.6", - "CVE": ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] - "urls": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.aarch64.rpm" - }, - } - "uninstall_name": "grafana*" + "uninstall_name": "grafana*" + }, + "grafana-8.5.6": { + "package_name": "grafana", + "package_version": "8.5.6", + "CVE": [ + "CVE-2023-2183", + "CVE-2023-1410", + "CVE-2023-0594", + "CVE-2023-0507", + "CVE-2022-39324", + "CVE-2022-39307", + "CVE-2022-39306", + "CVE-2022-39229", + "CVE-2022-39201", + "CVE-2022-36062", + "CVE-2022-35957", + "CVE-2022-31130", + "CVE-2022-31123", + "CVE-2022-31107", + "CVE-2022-31097", + "CVE-2022-23552", + "CVE-2022-23498" + ], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.aarch64.rpm" + } }, - "grafana-9.1.1": { - "package_name": "grafana", - "package_version": "9.1.1", - "CVE": ["CVE-2023-2183", "CVE-2023-1387", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-23552", "CVE-2022-23498"], - "url": { - "centos": { - "amd64": 
"https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.aarch64.rpm" - }, - "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb" - } - }, - "uninstall_name": "grafana*" + "uninstall_name": "grafana*" + }, + "grafana-9.1.1": { + "package_name": "grafana", + "package_version": "9.1.1", + "CVE": [ + "CVE-2023-2183", + "CVE-2023-1387", + "CVE-2022-39324", + "CVE-2022-39307", + "CVE-2022-39306", + "CVE-2022-39229", + "CVE-2022-39201", + "CVE-2022-36062", + "CVE-2022-35957", + "CVE-2022-31130", + "CVE-2022-31123", + "CVE-2022-23552", + "CVE-2022-23498" + ], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb" + } }, - "grafana-9.2.0": { - "package_name": "grafana", - "package_version": "9.2.0", - "CVE": ["CVE-2021-25804", "CVE-2021-25803", "CVE-2021-25802", "CVE-2021-25801","CVE-2020-26664"], - "url": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.aarch64.rpm" - }, - "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb" - }, - "uninstall_name": "grafana*" - }, - "grafana-9.4.17": { - "package_name": "grafana", - "package_version": "9.4.17", - "CVE": [], - "url": { - "centos": { - 
"amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.aarch64.rpm" - }, - "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_arm64.deb" - } - }, - "uninstall_name": "grafana*" + "uninstall_name": "grafana*" + }, + "grafana-9.2.0": { + "package_name": "grafana", + "package_version": "9.2.0", + "CVE": [ + "CVE-2021-25804", + "CVE-2021-25803", + "CVE-2021-25802", + "CVE-2021-25801", + "CVE-2020-26664" + ], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb" + }, + "uninstall_name": "grafana*" + } + }, + "grafana-9.4.17": { + "package_name": "grafana", + "package_version": "9.4.17", + "CVE": [], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_arm64.deb" + } }, - "grafana-9.5.13": { - "package_name": "grafana-enterprise", - "package_version": "9.5.13", - "CVE": [], - "url": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.aarch64.rpm" - }, - "ubuntu": { - 
"amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_arm64.deb" - } - }, - "uninstall_name": "grafana*" + "uninstall_name": "grafana*" + }, + "grafana-9.5.13": { + "package_name": "grafana-enterprise", + "package_version": "9.5.13", + "CVE": [], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_arm64.deb" + } }, - "grafana-10.0.0": - { - "package_name": "grafana-enterprise", - "package_version": "10.0.0", - "CVE": ["CVE-2023-4822", "CVE-2023-4399"], - "url": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.aarch64.rpm" - }, - "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_arm64.deb" - } - }, - "uninstall_name": "grafana*" + "uninstall_name": "grafana*" + }, + "grafana-10.0.0": { + "package_name": "grafana-enterprise", + "package_version": "10.0.0", + "CVE": [ + "CVE-2023-4822", + "CVE-2023-4399" + ], + "url": { + "centos": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_arm64.deb" + 
} }, - "vlc-3.0.6": { - "package_name": "VLC media player" - "package_version": "3.0.6", - "CVE": ["CVE-2023-47360", "CVE-2023-47359", "CVE-2023-46814", "CVE-2022-41325", "CVE-2020-26664", "CVE-2019-19721", "CVE-2019-13962", "CVE-2019-13602", "CVE-2019-12874", "CVE-2019-5460", "CVE-2019-5459", "CVE-2019-5439"], - "urls": { - "windows": { - "amd64": "https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe" - } - }, - "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_name": "grafana*" + }, + "vlc-3.0.6": { + "package_name": "VLC media player", + "package_version": "3.0.6", + "CVE": [ + "CVE-2023-47360", + "CVE-2023-47359", + "CVE-2023-46814", + "CVE-2022-41325", + "CVE-2020-26664", + "CVE-2019-19721", + "CVE-2019-13962", + "CVE-2019-13602", + "CVE-2019-12874", + "CVE-2019-5460", + "CVE-2019-5459", + "CVE-2019-5439" + ], + "urls": { + "windows": { + "amd64": "https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe" + } }, - "vlc-3.0.7": { - "package_name": "VLC media player" - "package_version": "3.0.7", - "CVE": ["CVE-2023-47360", "CVE-2023-47359", "CVE-2023-46814", "CVE-2022-41325", "CVE-2020-26664", "CVE-2019-19721", "CVE-2019-13962", "CVE-2019-13602", "CVE-2019-12874"] - "url": { - "windows": { - "amd64": "https://get.videolan.org/vlc/3.0.7/win64/vlc-3.0.7-win64.exe" - } - }, - "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + }, + "vlc-3.0.7": { + "package_name": "VLC media player", + "package_version": "3.0.7", + "CVE": [ + "CVE-2023-47360", + "CVE-2023-47359", + "CVE-2023-46814", + "CVE-2022-41325", + "CVE-2020-26664", + "CVE-2019-19721", + "CVE-2019-13962", + "CVE-2019-13602", + "CVE-2019-12874" + ], + "url": { + "windows": { + "amd64": 
"https://get.videolan.org/vlc/3.0.7/win64/vlc-3.0.7-win64.exe" + } }, - "vlc-3.0.7.1": { - "package_name": "VLC media player", - "package_version": "3.0.7.1", - "CVE": ["CVE-2019-14970", "CVE-2019-14778", "CVE-2019-14777", "CVE-2019-14776", "CVE-2019-14535", "CVE-2019-14534", "CVE-2019-14533", "CVE-2019-14498", "CVE-2019-14438", "CVE-2019-14437", "CVE-2019-13602"], - "url": { - "windows": { - "amd64": "https://get.videolan.org/vlc/3.0.7.1/win64/vlc-3.0.7.1-win64.exe" - } - }, - "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + }, + "vlc-3.0.7.1": { + "package_name": "VLC media player", + "package_version": "3.0.7.1", + "CVE": [ + "CVE-2019-14970", + "CVE-2019-14778", + "CVE-2019-14777", + "CVE-2019-14776", + "CVE-2019-14535", + "CVE-2019-14534", + "CVE-2019-14533", + "CVE-2019-14498", + "CVE-2019-14438", + "CVE-2019-14437", + "CVE-2019-13602" + ], + "url": { + "windows": { + "amd64": "https://get.videolan.org/vlc/3.0.7.1/win64/vlc-3.0.7.1-win64.exe" + } }, - "vlc-3.0.11": { - "package_name": "VLC media player", - "package_version": "3.0.11", - "CVE": ["CVE-2021-25804", "CVE-2021-25803", "CVE-2021-25802", "CVE-2021-25801","CVE-2020-26664"], - "url": { - "windows": { - "amd64": "https://get.videolan.org/vlc/3.0.11/win64/vlc-3.0.11-win64.exe" - } - }, - "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + }, + "vlc-3.0.11": { + "package_name": "VLC media player", + "package_version": "3.0.11", + "CVE": [ + "CVE-2021-25804", + "CVE-2021-25803", + "CVE-2021-25802", + "CVE-2021-25801", + "CVE-2020-26664" + ], + "url": { + "windows": { + "amd64": 
"https://get.videolan.org/vlc/3.0.11/win64/vlc-3.0.11-win64.exe" + } }, - "vlc-3.0.20": { - "package_name": "VLC media player", - "package_version": "3.0.20", - "CVE": [], - "url": { - "windows": { - "amd64": "https://get.videolan.org/vlc/3.0.20/win64/vlc-3.0.20-win64.exe" - } - }, - "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + }, + "vlc-3.0.20": { + "package_name": "VLC media player", + "package_version": "3.0.20", + "CVE": [], + "url": { + "windows": { + "amd64": "https://get.videolan.org/vlc/3.0.20/win64/vlc-3.0.20-win64.exe" + } }, - - "node-v17.0.1": { - "package_name": "node", - "package_version": "17.0.1", - "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg", - "arm64v8": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg" - } - }, - "uninstall_name": "node*" + "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", + "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + }, + "node-v17.0.1": { + "package_name": "node", + "package_version": "17.0.1", + "CVE": [ + "CVE-2022-21824", + "CVE-2022-0778", + "CVE-2021-44533", + "CVE-2021-44532", + "CVE-2021-44531", + "CVE-2021-4044" + ], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg", + "arm64v8": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg" + } }, - "node-v17.1.0": { - "package_name": "node", - "package_version": "17.1.0", - "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg", - "arm64v8": "https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg" - } - 
}, - "uninstall_name": "node*" + "uninstall_name": "node*" + }, + "node-v17.1.0": { + "package_name": "node", + "package_version": "17.1.0", + "CVE": [ + "CVE-2022-21824", + "CVE-2022-0778", + "CVE-2021-44533", + "CVE-2021-44532", + "CVE-2021-44531", + "CVE-2021-4044" + ], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg", + "arm64v8": "https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg" + } }, - "node-v18.0.0": { - "package_name": "node", - "package_version": "18.0.0", - "CVE": ["CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30589", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-35256", "CVE-2022-35255", "CVE-2022-32223", "CVE-2022-32222", "CVE-2022-32215", "CVE-2022-32214", "CVE-2022-32213", "CVE-2022-32212", "CVE-2022-3786", "CVE-2022-3602"], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg", - "arm64v8": "https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg" - } - }, - "uninstall_name": "node*" + "uninstall_name": "node*" + }, + "node-v18.0.0": { + "package_name": "node", + "package_version": "18.0.0", + "CVE": [ + "CVE-2023-38552", + "CVE-2023-32559", + "CVE-2023-32006", + "CVE-2023-32002", + "CVE-2023-30590", + "CVE-2023-30589", + "CVE-2023-30588", + "CVE-2023-30585", + "CVE-2023-30581", + "CVE-2023-23920", + "CVE-2023-23919", + "CVE-2023-23918", + "CVE-2022-43548", + "CVE-2022-35256", + "CVE-2022-35255", + "CVE-2022-32223", + "CVE-2022-32222", + "CVE-2022-32215", + "CVE-2022-32214", + "CVE-2022-32213", + "CVE-2022-32212", + "CVE-2022-3786", + "CVE-2022-3602" + ], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg", + "arm64v8": "https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg" + } }, - "node-v18.11.0": { - "package_name": "node", - "package_version": "18.11.0", - "CVE": ["CVE-2023-38552", "CVE-2023-32559", 
"CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-32222"], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v18.11.0/node-v18.11.0.pkg", - "arm64v8": "https://nodejs.org/dist/v18.11.0/node-v18.11.0.pkg" - } - }, - "uninstall_name": "node*" + "uninstall_name": "node*" + }, + "node-v18.11.0": { + "package_name": "node", + "package_version": "18.11.0", + "CVE": [ + "CVE-2023-38552", + "CVE-2023-32559", + "CVE-2023-32006", + "CVE-2023-32002", + "CVE-2023-30590", + "CVE-2023-30588", + "CVE-2023-30585", + "CVE-2023-30581", + "CVE-2023-23920", + "CVE-2023-23919", + "CVE-2023-23918", + "CVE-2022-32222" + ], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v18.11.0/node-v18.11.0.pkg", + "arm64v8": "https://nodejs.org/dist/v18.11.0/node-v18.11.0.pkg" + } }, - "node-v18.12.0": { - "package_name": "node", - "package_version": "18.12.0", - "CVE": ["CVE-2023-44487", "CVE-2023-38552", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-23936", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-3786", "CVE-2022-3602"], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v18.12.0/node-v18.12.0.pkg", - "arm64v8": "https://nodejs.org/dist/v18.12.0/node-v18.12.0.pkg" - } - }, - "uninstall_name": "node*" + "uninstall_name": "node*" + }, + "node-v18.12.0": { + "package_name": "node", + "package_version": "18.12.0", + "CVE": [ + "CVE-2023-44487", + "CVE-2023-38552", + "CVE-2023-32002", + "CVE-2023-30590", + "CVE-2023-30588", + "CVE-2023-30585", + "CVE-2023-23936", + "CVE-2023-23920", + "CVE-2023-23919", + "CVE-2023-23918", + "CVE-2022-43548", + "CVE-2022-3786", + "CVE-2022-3602" + ], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v18.12.0/node-v18.12.0.pkg", + "arm64v8": "https://nodejs.org/dist/v18.12.0/node-v18.12.0.pkg" + } }, - "node-v19.5.0": { - 
"package_name": "node", - "package_version": "19.5.0", - "CVE": [], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg", - "arm64v8": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg" - } - "windows": "https://nodejs.org/dist/v19.7.0/win-x86/node.exe" - }, - "uninstall_name": "node*" + "uninstall_name": "node*" + }, + "node-v19.5.0": { + "package_name": "node", + "package_version": "19.5.0", + "CVE": [], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg", + "arm64v8": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg" + }, + "windows": "https://nodejs.org/dist/v19.7.0/win-x86/node.exe" }, - "node-v19.6.0": { - "package_name": "node", - "package_version": "19.6.0", - "CVE": [], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg", - "arm64v8": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg" - } - }, - "uninstall_name": "node*" + "uninstall_name": "node*" + }, + "node-v19.6.0": { + "package_name": "node", + "package_version": "19.6.0", + "CVE": [], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg", + "arm64v8": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg" + } }, - "node-v20.0.0": { - "package_name": "node", - "package_version": "20.0.0", - "CVE": ["CVE-2022-21824"], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v20.0.0/node-v20.0.0.pkg", - "arm64v8": "https://nodejs.org/dist/v20.0.0/node-v20.0.0.pkg" - } - }, - "uninstall_name": "node*" + "uninstall_name": "node*" + }, + "node-v20.0.0": { + "package_name": "node", + "package_version": "20.0.0", + "CVE": [ + "CVE-2022-21824" + ], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v20.0.0/node-v20.0.0.pkg", + "arm64v8": "https://nodejs.org/dist/v20.0.0/node-v20.0.0.pkg" + } }, - "node-v20.1.0": { - "package_name": "node", - "package_version": "20.1.0", - "CVE": ["CVE-2023-44487", "CVE-2023-39332", "CVE-2023-39331", "CVE-2023-38552", 
"CVE-2023-32559", "CVE-2023-32558", "CVE-2023-32006", "CVE-2023-32005", "CVE-2023-32004", "CVE-2023-32003", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30586", "CVE-2023-30585", "CVE-2023-30581"], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v20.1.0/node-v20.1.0.pkg", - "arm64v8": "https://nodejs.org/dist/v20.1.0/node-v20.1.0.pkg" - } - }, - "uninstall_name": "node*" + "uninstall_name": "node*" + }, + "node-v20.1.0": { + "package_name": "node", + "package_version": "20.1.0", + "CVE": [ + "CVE-2023-44487", + "CVE-2023-39332", + "CVE-2023-39331", + "CVE-2023-38552", + "CVE-2023-32559", + "CVE-2023-32558", + "CVE-2023-32006", + "CVE-2023-32005", + "CVE-2023-32004", + "CVE-2023-32003", + "CVE-2023-32002", + "CVE-2023-30590", + "CVE-2023-30588", + "CVE-2023-30586", + "CVE-2023-30585", + "CVE-2023-30581" + ], + "urls": { + "macos": { + "amd64": "https://nodejs.org/dist/v20.1.0/node-v20.1.0.pkg", + "arm64v8": "https://nodejs.org/dist/v20.1.0/node-v20.1.0.pkg" + } }, - "lynx-2.8.8":{ - "package_name": "lynx", - "package_version": "2.8.8-0.3.dev15.el7", - "CVE": ["CVE-2021-38165"], - "urls": { - "centos": { - "amd64": "https://download.cf.centos.org/centos/7/os/x86_64/Packages/lynx-2.8.8-0.3.dev15.el7.x86_64.rpm" - } - }, - "uninstall_name": "lynx*" + "uninstall_name": "node*" + }, + "lynx-2.8.8": { + "package_name": "lynx", + "package_version": "2.8.8-0.3.dev15.el7", + "CVE": [ + "CVE-2021-38165" + ], + "urls": { + "centos": { + "amd64": "https://download.cf.centos.org/centos/7/os/x86_64/Packages/lynx-2.8.8-0.3.dev15.el7.x86_64.rpm" + } }, - "firefox-78.9.0":{ - "package_name": "firefox", - "package_version": "78.9.0-1.el7.centos", - "CVE": ["CVE-2023-6873", "CVE-2023-6872", "CVE-2022-38478"], - "urls": { - "centos": { - "amd64": "https://download.cf.centos.org/centos/7/updates/x86_64/Packages/firefox-78.9.0-1.el7.centos.x86_64.rpm" - } - }, - "uninstall_name": "firefox*" + "uninstall_name": "lynx*" + }, + "firefox-78.9.0": { + 
"package_name": "firefox", + "package_version": "78.9.0-1.el7.centos", + "CVE": [ + "CVE-2023-6873", + "CVE-2023-6872", + "CVE-2022-38478" + ], + "urls": { + "centos": { + "amd64": "https://download.cf.centos.org/centos/7/updates/x86_64/Packages/firefox-78.9.0-1.el7.centos.x86_64.rpm" + } }, - "firefox-91.13.0":{ - "package_name": "firefox", - "package_version": "91.13.0-1.el7.centos", - "CVE": ["CVE-2023-6873", "CVE-2023-6872"], - "urls": { - "centos": { - "amd64": "https://download.cf.centos.org/centos/7/updates/x86_64/Packages/firefox-91.13.0-1.el7.centos.x86_64.rpm" - } - }, - "uninstall_name": "firefox*" - } + "uninstall_name": "firefox*" + }, + "firefox-91.13.0": { + "package_name": "firefox", + "package_version": "91.13.0-1.el7.centos", + "CVE": [ + "CVE-2023-6873", + "CVE-2023-6872" + ], + "urls": { + "centos": { + "amd64": "https://download.cf.centos.org/centos/7/updates/x86_64/Packages/firefox-91.13.0-1.el7.centos.x86_64.rpm" + } + }, + "uninstall_name": "firefox*" + } } From de57b6347eb6d9b147c7206f6b9d2ec0add75f8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:38:33 +0000 Subject: [PATCH 125/174] style: remove extra whitespaces --- .../templates/var-ossec-etc-ossec-server.conf.j2 | 3 --- 1 file changed, 3 deletions(-) diff --git a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 index 7961e113d9..bec4600674 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 +++ b/provisioning/roles/wazuh/ansible-wazuh-manager/templates/var-ossec-etc-ossec-server.conf.j2 @@ -672,7 +672,4 @@ {{ wazuh_manager_config.cluster.hidden }} - - - From 3c79ee5787486766b2ec3aba42e0786199cf6707 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:39:46 +0000 Subject: [PATCH 126/174] style: 
remove extra white space --- provisioning/roles/createInventory.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/provisioning/roles/createInventory.yaml b/provisioning/roles/createInventory.yaml index 63de2c2d7b..c97f4cb6fb 100644 --- a/provisioning/roles/createInventory.yaml +++ b/provisioning/roles/createInventory.yaml @@ -3,5 +3,5 @@ tasks: - name: Template a file ansible.builtin.template: - src: "{{ source_template }}" + src: "{{ source_template }}" dest: "{{ dest_inventory}}" From 6ed63373a114af9f0ffed5a29da5b3f048c9988e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:41:46 +0000 Subject: [PATCH 127/174] refac: removed unnused function --- .../wazuh_testing/wazuh_testing/tools/file.py | 21 ------------------- 1 file changed, 21 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/file.py b/deps/wazuh_testing/wazuh_testing/tools/file.py index 269c6cac48..d4bab4592f 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/file.py +++ b/deps/wazuh_testing/wazuh_testing/tools/file.py @@ -924,24 +924,3 @@ def modify_file(path, name, new_content=None, is_binary=False): modify_file_group(path, name) modify_file_permission(path, name) modify_file_win_attributes(path, name) - - -def create_temp_file(content: str) -> str: - """ - Create a temporary file with the specified content. - - Args: - content (str): The content to be written to the temporary file. - - Returns: - str: The path to the created temporary file. 
- """ - try: - fd, temp_file_path = tempfile.mkstemp(text=True) - with os.fdopen(fd, 'w', newline='\n') as temp_file: - temp_file.write(content) - return temp_file_path - except Exception as e: - print(f"Error creating temporary file: {e}") - raise - From 2a3aebb71d892f28864bb65b392ce0bcbe5ee6cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:55:24 +0000 Subject: [PATCH 128/174] style: fix remove_package debugging messages --- .../wazuh_testing/tools/system.py | 56 ++++++++++--------- 1 file changed, 31 insertions(+), 25 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 8df96c6f7b..343fa55533 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -544,7 +544,7 @@ def get_master(self): return master_node - def remove_package(self, host, package_name, system): + def remove_package(self, host, system, package_uninstall_name=None, custom_uninstall_playbook=None): """ Removes a package from the specified host. 
@@ -560,33 +560,38 @@ def remove_package(self, host, package_name, system): Example: host_manager.remove_package('my_host', 'my_package', system='ubuntu') """ - logging.critical(f"Removing package {package_name} from {host}") - logging.critical(f"System: {system}") - logging.critical(f"Host variables: {self.get_host_variables(host)}") + logging.info(f"Removing package {package_uninstall_name} from host {host}") + logging.info(f"System: {system}") - result = False + remove_operation_result = False os_name = self.get_host_variables(host)['os_name'] - if os_name == 'windows': - logger.setLevel(logging.DEBUG) - r = self.run_playbook(host, 'remove_package_win', params={'uninstall_script_path': package_name}) - # result = self.get_host(host).ansible("ansible.windows.win_shell", fr'powershell -Command "& \"{package_name}\" /S /c"', check=False) - print(r) - logger.setLevel(logging.CRITICAL) - - elif os_name == 'linux': - os = self.get_host_variables(host)['os'].split('_')[0] - if os == 'centos': - logging.critical(f"Centos!") - result = self.get_host(host).ansible("yum", f"name={package_name} state=absent", check=False) - logging.critical(f"Result: {result}") - elif os == 'ubuntu': - result = self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) - elif os_name == 'macos': - result = self.get_host(host).ansible("command", f"brew uninstall {package_name}", check=False) - - print(result) - return result + + if custom_uninstall_playbook: + remove_operation_result = self.run_playbook(host, custom_uninstall_playbook) + elif package_uninstall_name: + if os_name == 'windows': + remove_operation_result = self.get_host(host).ansible("win_command", + f"{package_uninstall_name} /uninstall /quiet /S", + check=False) + elif os_name == 'linux': + os = self.get_host_variables(host)['os'].split('_')[0] + if os == 'centos': + remove_operation_result = self.get_host(host).ansible("yum", + f"name={package_uninstall_name} state=absent", + check=False) + elif os 
== 'ubuntu': + remove_operation_result = self.get_host(host).ansible("apt", + f"name={package_uninstall_name} state=absent", + check=False) + elif os_name == 'macos': + remove_operation_result = self.get_host(host).ansible("command", + f"brew uninstall {package_uninstall_name}", + check=False) + + logging.info(f"Package removed result {remove_operation_result}") + + return remove_operation_result def run_playbook(self, host, playbook_name, params=None): file_dir = os.path.dirname(os.path.realpath(__file__)) @@ -744,4 +749,5 @@ def clean_environment(host_manager, target_files): target_files (dict): a dictionary of tuples, each with the host and the path of the file to clear. """ for target in target_files: + host_manager.clear_file(host=target[0], file_path=target[1]) From 135a12077abb509e5616d7eb2f6815d6ca73af52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:57:20 +0000 Subject: [PATCH 129/174] fix: replace grafana-enterprise by grafana package name --- .../cases/test_vulnerability.yaml | 124 +++++++++--------- 1 file changed, 62 insertions(+), 62 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 27577f3272..d8253bbf59 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -26,11 +26,11 @@ state_index: True package: centos: - amd64: grafana-enterprise-8.5.5 - arm64v8: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 ubuntu: - amd64: grafana-enterprise-8.5.5 - arm64v8: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 windows: amd64: vlc-3.0.6 macos: @@ -69,11 +69,11 @@ state_index: True package: centos: - amd64: grafana-enterprise-8.5.5 - arm64v8: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 + arm64v8: 
grafana-8.5.5 ubuntu: - amd64: grafana-enterprise-8.5.5 - arm64v8: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 windows: amd64: vlc-3.0.6 macos: @@ -109,11 +109,11 @@ state_index: True package: centos: - amd64: grafana-enterprise-8.5.5 - arm64v8: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 ubuntu: - amd64: grafana-enterprise-8.5.5 - arm64v8: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 windows: amd64: vlc-3.0.6 macos: @@ -129,11 +129,11 @@ package: from: centos: - amd64: grafana-enterprise-8.5.5 - arm64v8: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 ubuntu: - amd64: grafana-enterprise-8.5.5 - arm64v8: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 windows: amd64: vlc-3.0.6 macos: @@ -142,9 +142,9 @@ to: centos: amd64: firefox-91.13.0 - arm64v8: grafana-enterprise-8.5.6 + arm64v8: grafana-8.5.6 ubuntu: - amd64: grafana-enterprise-8.5.6 + amd64: grafana-8.5.6 windows: amd64: vlc-3.0.6 macos: @@ -182,11 +182,11 @@ package: from: centos: - amd64: grafana-enterprise-8.5.6 - arm64v8: grafana-enterprise-8.5.6 + amd64: grafana-8.5.6 + arm64v8: grafana-8.5.6 ubuntu: - amd64: grafana-enterprise-8.5.6 - arm64v8: grafana-enterprise-8.5.6 + amd64: grafana-8.5.6 + arm64v8: grafana-8.5.6 windows: amd64: vlc-3.0.7 macos: @@ -194,11 +194,11 @@ arm64v8: node-v17.1.0 to: centos: - amd64: grafana-enterprise-9.1.1 - arm64v8: grafana-enterprise-9.1.1 + amd64: grafana-9.1.1 + arm64v8: grafana-9.1.1 ubuntu: - amd64: grafana-enterprise-9.1.1 - arm64v8: grafana-enterprise-9.1.1 + amd64: grafana-9.1.1 + arm64v8: grafana-9.1.1 windows: amd64: vlc-3.0.7.1 macos: @@ -234,11 +234,11 @@ package: from: centos: - amd64: grafana-enterprise-9.1.1 - arm64v8: grafana-enterprise-9.1.1 + amd64: grafana-9.1.1 + arm64v8: grafana-9.1.1 ubuntu: - amd64: grafana-enterprise-9.1.1 - arm64v8: grafana-enterprise-9.1.1 + amd64: grafana-9.1.1 + arm64v8: grafana-9.1.1 windows: 
amd64: vlc-3.0.7.1 macos: @@ -246,11 +246,11 @@ arm64v8: node-v18.11.0 to: centos: - amd64: grafana-enterprise-9.2.0 - arm64v8: grafana-enterprise-9.2.0 + amd64: grafana-9.2.0 + arm64v8: grafana-9.2.0 ubuntu: - amd64: grafana-enterprise-9.2.0 - arm64v8: grafana-enterprise-9.2.0 + amd64: grafana-9.2.0 + arm64v8: grafana-9.2.0 windows: amd64: vlc-3.0.11 macos: @@ -287,11 +287,11 @@ package: from: centos: - amd64: grafana-enterprise-9.2.0 - arm64v8: grafana-enterprise-9.2.0 + amd64: grafana-9.2.0 + arm64v8: grafana-9.2.0 ubuntu: - amd64: grafana-enterprise-9.2.0 - arm64v8: grafana-enterprise-9.2.0 + amd64: grafana-9.2.0 + arm64v8: grafana-9.2.0 windows: amd64: vlc-3.0.11 macos: @@ -299,11 +299,11 @@ arm64v8: node-v18.12.0 to: centos: - amd64: grafana-enterprise-9.4.17 - arm64v8: grafana-enterprise-9.4.17 + amd64: grafana-9.4.17 + arm64v8: grafana-9.4.17 ubuntu: - arm64v8: grafana-enterprise-9.4.17 - amd64: grafana-enterprise-9.4.17 + arm64v8: grafana-9.4.17 + amd64: grafana-9.4.17 windows: amd64: vlc-3.0.20 macos: @@ -348,11 +348,11 @@ package: from: centos: - amd64: grafana-enterprise-9.4.17 - arm64v8: grafana-enterprise-9.4.17 + amd64: grafana-9.4.17 + arm64v8: grafana-9.4.17 ubuntu: - arm64v8: grafana-enterprise-9.4.17 - amd64: grafana-enterprise-9.4.17 + arm64v8: grafana-9.4.17 + amd64: grafana-9.4.17 windows: amd64: vlc-3.0.20 macos: @@ -360,11 +360,11 @@ arm64v8: node-v19.5.0 to: centos: - amd64: grafana-enterprise-9.5.13 - arm64v8: grafana-enterprise-9.5.13 + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 ubuntu: - amd64: grafana-enterprise-9.5.13 - arm64v8: grafana-enterprise-9.5.13 + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 windows: amd64: node-v19.6.0 macos: @@ -400,9 +400,9 @@ from: centos: amd64: firefox-91.13.0 - arm64v8: grafana-enterprise-8.5.5 + arm64v8: grafana-8.5.5 ubuntu: - amd64: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 windows: amd64: vlc-3.0.6 macos: @@ -411,9 +411,9 @@ to: centos: amd64: firefox-91.13.0 - arm64v8: 
grafana-enterprise-8.5.5 + arm64v8: grafana-8.5.5 ubuntu: - amd64: grafana-enterprise-8.5.5 + amd64: grafana-8.5.5 windows: amd64: vlc-3.0.6 macos: @@ -448,11 +448,11 @@ state_index: True package: centos: - amd64: grafana-enterprise-9.5.13 - arm64v8: grafana-enterprise-9.5.13 + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 ubuntu: - amd64: grafana-enterprise-9.5.13 - arm64v8: grafana-enterprise-9.5.13 + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 windows: amd64: node-v19.6.0 macos: @@ -484,11 +484,11 @@ state_index: True package: centos: - amd64: grafana-enterprise-9.5.13 - arm64v8: grafana-enterprise-9.5.13 + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 ubuntu: - amd64: grafana-enterprise-9.5.13 - arm64v8: grafana-enterprise-9.5.13 + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 windows: amd64: node-v19.6.0 macos: From b244c6aeefd49266edbd996b3db415086f30a7b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:57:57 +0000 Subject: [PATCH 130/174] refac: replace monitoring data function in VD waiter --- .../wazuh_testing/end_to_end/waiters.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index e4c1802178..1f2d501cb5 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -19,13 +19,15 @@ This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ -from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs_manager, monitoring_events_multihost +from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost from wazuh_testing.end_to_end.wazuh_api import get_agents_id from wazuh_testing.tools.system import HostManager import time +VD_FEED_UPDATE_TIMEOUT = 300 + def 
wait_until_vd_is_updated(host_manager: HostManager) -> None: """ Wait until the vulnerability data is updated for all manager hosts. @@ -33,14 +35,11 @@ def wait_until_vd_is_updated(host_manager: HostManager) -> None: Args: host_manager (HostManager): Host manager instance to handle the environment. """ - monitoring_data = {} - for manager in host_manager.get_group_hosts('manager'): - monitoring_data = generate_monitoring_logs_manager( - host_manager, manager, "INFO: Action for 'vulnerability_feed_manager' finished", 1000 - ) + monitoring_data = generate_monitoring_logs(host_manager, ["INFO: Action for 'vulnerability_feed_manager' finished"], + [VD_FEED_UPDATE_TIMEOUT], host_manager.get_group_hosts('manager')) - monitoring_events_multihost(host_manager, monitoring_data) + monitoring_events_multihost(host_manager, monitoring_data) def wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: From 77d748405d70eb35a203911ab970d1e5ba26b905 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 14:58:41 +0000 Subject: [PATCH 131/174] refac: remove unnecessary commentaries in VLC remove package playbook --- .../tools/playbooks/remove_package_win.yaml | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml b/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml index 1bd2cf3146..562d5303ca 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml +++ b/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml @@ -2,21 +2,6 @@ hosts: agent2 become_method: runas tasks: - # - name: Run VLC uninstall - # ansible.windows.win_command: - # cmd: "uninstall.exe /S /c -wait /norestart & timeout 10" - # chdir: '{{uninstall_script_path}}' - # become: yes - # become_user: Administrator - - - - # - name: Run VLC uninstall - # ansible.windows.win_powershell: - # script: "Start-Process 
'{{uninstall_script_path}}\\uninstall.exe' -ArgumentList '/S /c /norestart -wait'" - # become: yes - # become_user: Administrator - - name: Run VLC uninstall ansible.windows.win_powershell: script: | From 506b2c0f9e0ea1ead64bd46943ec2982f39d15c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 15:00:15 +0000 Subject: [PATCH 132/174] refac: rename VLC removal playbook name --- .../vulnerability_detector_packages/vuln_packages.json | 10 +++++----- .../{remove_package_win.yaml => remove_vlc_win.yaml} | 0 2 files changed, 5 insertions(+), 5 deletions(-) rename deps/wazuh_testing/wazuh_testing/tools/playbooks/{remove_package_win.yaml => remove_vlc_win.yaml} (100%) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json index 95f8ceeca6..2d6cfb099b 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json @@ -202,7 +202,7 @@ } }, "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_custom_playbook": "remove_vlc_win.yml" }, "vlc-3.0.7": { "package_name": "VLC media player", @@ -224,7 +224,7 @@ } }, "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_custom_playbook": "remove_vlc_win.yml" }, "vlc-3.0.7.1": { "package_name": "VLC media player", @@ -248,7 +248,7 @@ } }, "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_custom_playbook": "remove_vlc_win.yml" }, "vlc-3.0.11": { "package_name": "VLC media player", @@ -266,7 +266,7 @@ } }, "uninstall_name": "C:\\Program 
Files\\VideoLAN\\VLC\\uninstall.exe", - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_custom_playbook": "remove_vlc_win.yml" }, "vlc-3.0.20": { "package_name": "VLC media player", @@ -278,7 +278,7 @@ } }, "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe", - "uninstall_custom_playbook": "windows_uninstall_vlc.yml" + "uninstall_custom_playbook": "remove_vlc_win.yml" }, "node-v17.0.1": { "package_name": "node", diff --git a/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml b/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_vlc_win.yaml similarity index 100% rename from deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_package_win.yaml rename to deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_vlc_win.yaml From 6b85c980eb85b3da5c4329b32f47c1156156e143 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 15:05:03 +0000 Subject: [PATCH 133/174] style: remmove unnecessary commentaries in waiters module --- .../wazuh_testing/end_to_end/waiters.py | 36 ++++++------------- 1 file changed, 10 insertions(+), 26 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index 1f2d501cb5..a51ef59c36 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -1,32 +1,33 @@ """ -Vulnerability Data Update and Scan Monitoring Module. +Module to handle waiters for the end-to-end tests. ----------------------------------------------------- -This module provides functions for waiting until vulnerability data is updated for all manager hosts and until vulnerability scans for all agents are finished. +This module provides functions for waiting until vulnerability data is updated for all manager hosts and until +vulnerability scans for all agents are finished. 
Functions: - wait_until_vd_is_updated: Wait until the vulnerability data is updated for all manager hosts. - wait_until_vuln_scan_agents_finished: Wait until vulnerability scans for all agents are finished. -Dependencies: - - wazuh_testing.end_to_end.monitoring: Module containing functions for generating monitoring logs and handling events. - - wazuh_testing.end_to_end.wazuh_api: Module containing functions for retrieving agent IDs. - - wazuh_testing.tools.system: Module providing the HostManager class for handling the environment. +Constants: + - VD_FEED_UPDATE_TIMEOUT: Time in seconds to wait until the vulnerability data is updated for all manager hosts. + - VD_INITIAL_SCAN_PER_AGENT_TIMEOUT: Time in seconds to wait until vulnerability scans for each agent is finished. Copyright (C) 2015, Wazuh Inc. Created by Wazuh, Inc. . This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ +import time from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost from wazuh_testing.end_to_end.wazuh_api import get_agents_id from wazuh_testing.tools.system import HostManager -import time - VD_FEED_UPDATE_TIMEOUT = 300 +VD_INITIAL_SCAN_PER_AGENT_TIMEOUT = 15 + def wait_until_vd_is_updated(host_manager: HostManager) -> None: """ @@ -38,7 +39,6 @@ def wait_until_vd_is_updated(host_manager: HostManager) -> None: monitoring_data = generate_monitoring_logs(host_manager, ["INFO: Action for 'vulnerability_feed_manager' finished"], [VD_FEED_UPDATE_TIMEOUT], host_manager.get_group_hosts('manager')) - monitoring_events_multihost(host_manager, monitoring_data) @@ -49,21 +49,5 @@ def wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: Args: host_manager (HostManager): Host manager instance to handle the environment. """ - # The order of agents may not be guaranteed. - # The Vulnerability Detector scans are ordered based on the agent ID. 
- # We are currently awaiting completion of all scans globally, - # with a timeout set to 5 minutes for each agent. - final_timeout = 15 * len(host_manager.get_group_hosts('agent')) + final_timeout = VD_INITIAL_SCAN_PER_AGENT_TIMEOUT * len(get_agents_id(host_manager)) time.sleep(final_timeout) - - # for agent in host_manager.get_group_hosts('agent'): - # manager_host = host_manager.get_host_variables(agent)['manager'] - # agents_id = get_agents_id(host_manager) - # agent_id = agents_id.get(agent, '') - # finished_scan_pattern = rf"Finished vulnerability assessment for agent '{agent_id}'" - # - # monitoring_data = generate_monitoring_logs_manager( - # host_manager, manager_host, finished_scan_pattern, final_timeout - # ) - # - # monitoring_events_multihost(host_manager, monitoring_data) From 825deaf94cf5613a24294d8c9c907edd82acf090 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 15:13:59 +0000 Subject: [PATCH 134/174] style: improve VD E2E conftest readability --- .../test_vulnerability_detector/conftest.py | 69 ++++++++++--------- 1 file changed, 38 insertions(+), 31 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index c769bc2c17..aacb537e7f 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -4,12 +4,22 @@ This module contains Pytest fixtures and configuration settings for Wazuh end-to-end testing. It provides reusable fixtures and setup that can be shared across multiple test modules. +Functions: + - `collect_e2e_environment_data`: Collect data from the environment for the test. For example, logs from the + Wazuh components. 
+ - `collect_evidences`: Collect specific evidences for the test + - `validate_environment`: Check if the environment is accessible + Fixtures: - `host_manager`: Fixture for creating a HostManager instance representing the Wazuh test environment. + - `setup`: Fixture for running setup and teardown operations for the test. Configuration Options: - `--inventory-path`: Path to the inventory file specifying the test environment hosts. +Constants: + - `STYLE_PATH`: Path to the CSS stylesheet used by the HTML report. + Example: To use the `host_manager` fixture in your test module: @@ -28,12 +38,15 @@ def test_example(host_manager): import uuid from py.xml import html from numpydoc.docscrape import FunctionDoc +from typing import Generator, Dict from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations from wazuh_testing.end_to_end.logs import get_hosts_logs +STYLE_PATH = os.path.join(os.path.dirname(__file__), '../../../deps/wazuh_testing/wazuh_testing/reporting/style.css') + catalog = list() results = dict() @@ -42,6 +55,7 @@ def collect_e2e_environment_data(test_name, host_manager) -> None: """Collect data from the environment for the test Args: + test_name: Name of the test host_manager: An instance of the HostManager class containing information about hosts. 
""" logging.info("Collecting environment data") @@ -52,25 +66,25 @@ def collect_e2e_environment_data(test_name, host_manager) -> None: tests_evidences_directory = os.path.join(str(vulnerability_detector_logs_dir), str(test_name)) for host in environment_logs.keys(): + logging.info(f"Collecting logs for {host}") host_logs_name_evidence = host + "_ossec.log" evidence_file = os.path.join(tests_evidences_directory, host_logs_name_evidence) with open(evidence_file, 'w') as evidence_file: evidence_file.write(environment_logs[host]) -def collect_evidences(test_name, host_manager, evidences) -> None: +def collect_evidences(test_name, evidences) -> None: """ - Collect evidences for the test + Collect specific evidences for the test Args: request: Pytest request object - host_manager: An instance of the HostManager class containing information about hosts. results: An instance of the SyscollectorScansTestsResults class containing the results of the tests """ current_dir = os.path.dirname(__file__) vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") tests_evidences_directory = os.path.join(str(vulnerability_detector_logs_dir), str(test_name)) - logging.critical(f"Collecting evidences for {test_name}") + logging.info(f"Collecting evidences for {test_name}") if evidences: logging.info(f"Collecting custom evidences for {test_name}") @@ -87,7 +101,6 @@ def collect_evidences(test_name, host_manager, evidences) -> None: evidence_file.write(str(content)) - @pytest.fixture(scope='session') def host_manager(request): """Fixture for creating a HostManager instance. 
@@ -106,7 +119,6 @@ def host_manager(request): """ inventory_path = request.config.getoption('--inventory-path') host_manager = HostManager(inventory_path) - print("Testing") validate_environment(host_manager) return host_manager @@ -124,9 +136,19 @@ def validate_environment(host_manager: HostManager) -> None: @pytest.fixture(scope='function') -def setup(preconditions, teardown, host_manager): - """ +def setup(preconditions, teardown, host_manager) -> Generator[Dict, None, None]: + """Fixture for running setup and teardown operations for the specified tests case + It returns a dictionary with the results of the preconditions and teardown operations + + Args: + preconditions: Dictionary with the preconditions operations + teardown: Dictionary with the teardown operations + host_manager: An instance of the HostManager class containing information about hosts. + + Returns: + Generator: Dictionary with the results of the preconditions and teardown operations """ + logging.info("Running setup") result = {} @@ -140,7 +162,7 @@ def setup(preconditions, teardown, host_manager): logging.critical(f"Test failed for host {host}. Check logs for more information") logging.critical(f"Evidences: {result[host]['evidences']}") - logging.info(f"Rsults of preconditions: {result}") + logging.info(f"Result of preconditions: {result}") yield result @@ -154,11 +176,13 @@ def setup(preconditions, teardown, host_manager): logging.critical(f"Test failed for host {host}. 
Check logs for more information") logging.critical(f"Evidences: {result[host]['evidences']}") - logging.info(f"Rsults of teardown: {result}") + logging.info(f"Result of teardown: {result}") @pytest.fixture(scope='session', autouse=True) def handle_logs(): + """Fixture for handling test evidences logs + """ logs_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'logs') os.makedirs(logs_dir, exist_ok=True) @@ -248,6 +272,7 @@ def pytest_runtest_makereport(item, call): results[report.location[0]] = {'passed': 0, 'failed': 0, 'skipped': 0, 'xfailed': 0, 'error': 0} extra = getattr(report, 'extra', []) + if report.when == 'teardown': # Apply hack to fix length filename problem pytest_html.HTMLReport.TestResult.create_asset = create_asset @@ -282,28 +307,15 @@ def pytest_runtest_makereport(item, call): if item._request.node.name in test_result and 'evidences' in test_result[item._request.node.name]: evidences = test_result[item._request.node.name]['evidences'] - collect_evidences(item._request.node.name, item.funcargs['host_manager'], evidences) + collect_evidences(item._request.node.name, evidences) else: logging.info(f"No evidences found for {item._request.node.name}") - - # if 'host_manager' in item.funcargs: - # evidences = None - # if 'get_results' in item.funcargs: - # test_result = item.funcargs['get_results'] - # if item._request.node.name in test_result and 'evidences' in test_result[item._request.node.name]: - # evidences = test_result[item._request.node.name]['evidences'] - # else: - # logging.critical(f"No evidences found for {item._request.node.name}") - # evidences = None - - # collect_evidences(item._request.node.name, item.funcargs['host_manager'], evidences) - files = [] if os.path.exists(logs_path): files = [f for f in os.listdir(logs_path) if - os.path.isfile(os.path.join(logs_path, f))] + os.path.isfile(os.path.join(logs_path, f))] for filepath in files: fullpath = os.path.join(logs_path, filepath) @@ -360,9 +372,4 @@ def 
pytest_html_results_summary(prefix, summary, postfix): @pytest.hookimpl(tryfirst=True) def pytest_configure(config): if not config.option.css: - current_dir = os.path.dirname(__file__) - config.option.css = [os.path.join(current_dir, - '../../../deps/wazuh_testing/wazuh_testing/reporting/style.css')] - - - + config.option.css = [STYLE_PATH] From 296dc4025dfafa0b49ab9ceaa82ddc611f06aea7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 15:15:35 +0000 Subject: [PATCH 135/174] style: format yaml VD cases --- .../cases/test_vulnerability.yaml | 291 ++++++++---------- 1 file changed, 136 insertions(+), 155 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index d8253bbf59..087430308e 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -1,12 +1,12 @@ -- case: "Installation of a vulnerable package" - id: "install_package" +- case: Installation of a vulnerable package + id: install_package description: | Installation of a vulnerable package macos: - Used Package: Node 17.0.1 - PKG Format - CVES: - amd64: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - arm64v8: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + Used Package: Node 17.0.1 - PKG Format + CVES: + amd64: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + arm64v8: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], windows: Used Package: VLC 3.0.6 - Exe Format CVE: ["CVE-2023-47360", "CVE-2023-47359", "CVE-2023-46814", "CVE-2022-41325", "CVE-2020-26664", "CVE-2019-19721", 
"CVE-2019-13962", "CVE-2019-13602", "CVE-2019-12874", "CVE-2019-5460", "CVE-2019-5459", "CVE-2019-5439"], @@ -22,12 +22,12 @@ - operation: install_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: centos: - amd64: grafana-8.5.5 - arm64v8: grafana-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 ubuntu: amd64: grafana-8.5.5 arm64v8: grafana-8.5.5 @@ -36,10 +36,9 @@ macos: amd64: node-v17.0.1 arm64v8: node-v17.0.1 - -- case: "Remove vulnerable package" - id: "remove_package" - description: | +- case: Remove vulnerable package + id: remove_package + description: | Removal of a vulnerable package macos: Used Package: Node 17.0.1 - PKG Format @@ -58,19 +57,18 @@ Used Package Grafana - .rpm Format CVE Expected to mitigate: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] - preconditions: null body: tasks: - operation: remove_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: centos: - amd64: grafana-8.5.5 - arm64v8: grafana-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 ubuntu: amd64: grafana-8.5.5 arm64v8: grafana-8.5.5 @@ -79,38 +77,35 @@ macos: amd64: node-v17.0.1 arm64v8: node-v17.0.1 - - - -- case: "Upgrade: Maintain Vulnerability" - id: "upgrade_package_maintain_vulnerability" +- case: 'Upgrade: Maintain Vulnerability' + id: upgrade_package_maintain_vulnerability description: | - Upgrade of a vulnerable package which maintain vulnerability - macos: - Used Package: Node 17.1.0 - PKG Format - CVES: - amd64: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - arm64v8: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", 
"CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - windows: - Used Package: VLC 3.0.7 - Exe Format - "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - ubuntu: - Used Package Grafana 8.5.6 - .deb Format - CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] - centos: - Used Package Grafana 8.5.6 - .rpm Format - CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] - preconditions: + Upgrade of a vulnerable package which maintain vulnerability + macos: + Used Package: Node 17.1.0 - PKG Format + CVES: + amd64: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + arm64v8: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + windows: + Used Package: VLC 3.0.7 - Exe Format + "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + ubuntu: + Used Package Grafana 8.5.6 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + centos: + Used Package Grafana 8.5.6 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", 
"CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + preconditions: tasks: - operation: install_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: centos: - amd64: grafana-8.5.5 - arm64v8: grafana-8.5.5 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 ubuntu: amd64: grafana-8.5.5 arm64v8: grafana-8.5.5 @@ -124,8 +119,8 @@ - operation: update_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: from: centos: @@ -151,34 +146,30 @@ amd64: node-v17.1.0 arm64v8: node-v17.1.0 teardown: null - - -# grafana-enterprise_9.1.1_amd64.deb grafana-enterprise_9.2.0_amd64.deb grafana-enterprise_9.4.17_amd64.deb Any of the previous ones (except 9.4.17) grafana-enterprise_9.4.17_amd64.deb grafana-enterprise_9.5.13_amd64.deb grafana-enterprise_10.0.0_amd64.deb - -- case: "Upgrade: New vulnerability " - id: "upgrade_package_maintain_add_vulnerability" +- case: 'Upgrade: New vulnerability ' + id: upgrade_package_maintain_add_vulnerability description: | - Upgrade of a vulnerable package which include a new vulnerability - macos: - Used Package: Node 18.11.0 - PKG Format - CVES: ["CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-32222"], - windows: - Used Package: VLC 3.0.7.1 - Exe Format - "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - ubuntu: - Used Package Grafana 9.1.1 - .deb Format - CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", 
"CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] - centos: - Used Package Grafana 9.1.1 - .rpm Format - CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + Upgrade of a vulnerable package which include a new vulnerability + macos: + Used Package: Node 18.11.0 - PKG Format + CVES: ["CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-32222"], + windows: + Used Package: VLC 3.0.7.1 - Exe Format + "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + ubuntu: + Used Package Grafana 9.1.1 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + centos: + Used Package Grafana 9.1.1 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] preconditions: null body: tasks: - operation: update_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: from: centos: @@ -205,32 +196,32 @@ amd64: node-v18.11.0 arm64v8: node-v18.11.0 teardown: null +- case: 'Upgrade: Maintain and 
new vulnerability ' + id: upgrade_package_maintain_add_vulnerability + description: > + Upgrade of a vulnerable package which maintain vulnerabilities and include + new ones -- case: "Upgrade: Maintain and new vulnerability " - id: "upgrade_package_maintain_add_vulnerability" - description: | - Upgrade of a vulnerable package which maintain vulnerabilities and include new ones - macos: - Used Package: Node 18.12.0 - PKG Format - "CVE": ["CVE-2023-44487", "CVE-2023-38552", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-23936", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-3786", "CVE-2022-3602"], - windows: - Used Package: VLC 3.0.11 - Exe Format - "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - ubuntu: - Used Package Grafana 9.2.0 - .deb Format - CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] - centos: - Used Package Grafana 9.2.0 - .rpm Format - CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] - + macos: + Used Package: Node 18.12.0 - PKG Format + "CVE": ["CVE-2023-44487", "CVE-2023-38552", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-23936", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-3786", "CVE-2022-3602"], + windows: + Used Package: VLC 3.0.11 - Exe Format + "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", 
"CVE-2021-44531", "CVE-2021-4044"], + ubuntu: + Used Package Grafana 9.2.0 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] + centos: + Used Package Grafana 9.2.0 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"] preconditions: null body: tasks: - operation: update_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: from: centos: @@ -256,34 +247,31 @@ macos: amd64: node-v18.12.0 arm64v8: node-v18.12.0 - teardown: null - -- case: "Upgrade: Cease vulnerability" - id: "upgrade_package_remove_vulnerability" +- case: 'Upgrade: Cease vulnerability' + id: upgrade_package_remove_vulnerability description: | - Upgrade of a vulnerable which cease to be vulnerable - macos: - Used Package: Node 19.5.0 - PKG Format - "CVE": [], - windows: - Used Package: VLC 3.0.11 - Exe Format + Upgrade of a vulnerable which cease to be vulnerable + macos: + Used Package: Node 19.5.0 - PKG Format "CVE": [], - ubuntu: - Used Package Grafana 9.4.17 - .deb Format - CVE: [] - centos: - Used Package Grafana 9.4.17 - .rpm Format - CVE: [] - + windows: + Used Package: VLC 3.0.11 - Exe Format + "CVE": [], + ubuntu: + Used Package Grafana 9.4.17 - .deb Format + CVE: [] + centos: + Used Package Grafana 9.4.17 - .rpm Format + CVE: [] preconditions: null body: tasks: - operation: update_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: from: centos: @@ 
-309,11 +297,9 @@ macos: amd64: node-v19.5.0 arm64v8: node-v19.5.0 - teardown: null - -- case: "Upgrade: Non vulnerable to non vulnerable" - id: "upgrade_package_nonvulnerable_to_nonvulnerable" +- case: 'Upgrade: Non vulnerable to non vulnerable' + id: upgrade_package_nonvulnerable_to_nonvulnerable description: | Upgrade of a non vulnerable package to non vulnerable macos: @@ -328,13 +314,13 @@ centos: Used Package Grafana 9.5.13 - .rpm Format CVE: [] - preconditions: + preconditions: tasks: - operation: install_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: windows: amd64: node-v19.5.0 @@ -343,8 +329,8 @@ - operation: update_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: from: centos: @@ -371,10 +357,9 @@ amd64: node-v19.6.0 arm64v8: node-v19.6.0 teardown: null - -- case: "Upgrade: Non vulnerable to vulnerable package" - id: "upgrade_package_nonvulnerable_to_vulnerable" - description: | +- case: 'Upgrade: Non vulnerable to vulnerable package' + id: upgrade_package_nonvulnerable_to_vulnerable + description: | Upgrade to non vulnerable package to vulnerable macos: Used Package: Node 20.0.0 - PKG Format @@ -394,8 +379,8 @@ - operation: update_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: from: centos: @@ -419,11 +404,9 @@ macos: amd64: node-v20.0.0 arm64v8: node-v20.0.0 - teardown: null - -- case: "Installation of a non vulnerable package" - id: "install_package" +- case: Installation of a non vulnerable package + id: install_package description: | Installation of a non vulnerable package macos: @@ -444,23 +427,22 @@ - operation: install_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: - centos: - amd64: grafana-9.5.13 - arm64v8: grafana-9.5.13 - ubuntu: - amd64: grafana-9.5.13 - arm64v8: grafana-9.5.13 - windows: - amd64: 
node-v19.6.0 - macos: - amd64: node-v19.6.0 - arm64v8: node-v19.6.0 - -- case: "Remove: Non vulnerable package" - id: "remove_non_vulnerable_packge" + centos: + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 + ubuntu: + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 + windows: + amd64: node-v19.6.0 + macos: + amd64: node-v19.6.0 + arm64v8: node-v19.6.0 +- case: 'Remove: Non vulnerable package' + id: remove_non_vulnerable_packge description: | Removal of a non vulnerable package macos: @@ -480,20 +462,19 @@ - operation: remove_package target: agent check: - alerts: True - state_index: True + alerts: true + state_index: true package: - centos: - amd64: grafana-9.5.13 - arm64v8: grafana-9.5.13 - ubuntu: - amd64: grafana-9.5.13 - arm64v8: grafana-9.5.13 - windows: - amd64: node-v19.6.0 - macos: - amd64: node-v19.6.0 - arm64v8: node-v19.6.0 - - + centos: + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 + ubuntu: + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 + windows: + amd64: node-v19.6.0 + macos: + amd64: node-v19.6.0 + arm64v8: node-v19.6.0 teardown: null + From 8b50ed3cd062fd3f2bb625f8512ee52d7b02777f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 17:27:44 +0000 Subject: [PATCH 136/174] style: fix pep8 wazuh api --- deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py | 1 + 1 file changed, 1 insertion(+) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py index b508a34dac..b5e269d17f 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/wazuh_api.py @@ -17,6 +17,7 @@ from wazuh_testing.api import make_api_call, get_token_login_api + # Wazuh API Methods def get_api_parameters(host_manager): """ From b3bd81841f5fb5dfcb5ba3cb186fd34d79d05667 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 19:59:17 +0000 Subject: 
[PATCH 137/174] fix: error in indexer validation values for update tasks --- .../wazuh_testing/end_to_end/monitoring.py | 2 +- .../end_to_end/remote_operations_handler.py | 397 +++++++++--------- .../end_to_end/vulnerability_detector.py | 158 +++++-- .../modules/syscollector/__init__.py | 1 + .../test_vulnerability_detector.py | 5 +- 5 files changed, 321 insertions(+), 242 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index e923c557e4..9581511b99 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -160,7 +160,7 @@ def filter_events_by_timestamp(match_events: List) -> List: return results -def generate_monitoring_logs(host_manager: HostManager, regex_list: List[str], timeout_list: List[str], +def generate_monitoring_logs(host_manager: HostManager, regex_list: List[str], timeout_list: List[int], hosts: List[str], n_iterations=1, greater_than_timestamp: str = '') -> Dict: """ Generate monitoring data for logs on all provided hosts. diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 36bafefdcc..302f94548a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -15,39 +15,151 @@ - launch_remote_sequential_operation_on_agent: Launch sequential remote operations on a specific agent. - launch_parallel_operations: Launch parallel remote operations on multiple hosts. + Copyright (C) 2015, Wazuh Inc. Created by Wazuh, Inc. . 
This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ - -import os -import json import logging -import time from typing import Dict, List -from multiprocessing.pool import ThreadPool from datetime import datetime from concurrent.futures import ThreadPoolExecutor +from wazuh_testing.modules.syscollector import TIMEOUT_SYSCOLLECTOR_SHORT_SCAN from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost from wazuh_testing.end_to_end.waiters import wait_until_vuln_scan_agents_finished from wazuh_testing.end_to_end.regex import get_event_regex from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files -from wazuh_testing.end_to_end.vulnerability_detector import check_vuln_alert_indexer, check_vuln_state_index +from wazuh_testing.end_to_end.vulnerability_detector import check_vuln_alert_indexer, check_vuln_state_index, \ + load_packages_metadata, get_vulnerabilities_alerts_indexer, get_indexer_values -def load_packages_metadata(): - """ - Load packages metadata from the packages.json file. 
- """ - packages_filepath = os.path.join(os.path.dirname(__file__), - 'vulnerability_detector_packages', 'vuln_packages.json') +def wait_syscollector_and_vuln_scan(host_manager: HostManager, host: str, operation_data: Dict, + current_datetime: str = '') -> None: + logging.info(f"Waiting for syscollector scan to finish on {host}") + + timeout_syscollector_scan = TIMEOUT_SYSCOLLECTOR_SHORT_SCAN if 'timeout_syscollector_scan' not in \ + operation_data else operation_data['timeout_syscollector_scan'] + + # Wait until syscollector + monitoring_data = generate_monitoring_logs(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [timeout_syscollector_scan, timeout_syscollector_scan], + host_manager.get_group_hosts('agent'), + greater_than_timestamp=current_datetime) + + monitoring_events_multihost(host_manager, monitoring_data) + + logging.critical(f"Waiting for vulnerability scan to finish on {host}") + + wait_until_vuln_scan_agents_finished(host_manager) + + logging.critical(f"Checking agent vulnerability on {host}") + + +def check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime: str, host_manager: HostManager, + host: str, + package_data: Dict, + operation: str = 'install') -> None: + + # In case of update, we need to not expect vulnerabilities from previous package and expect vulnerabilities from + # new package + + if update: + package_data_from = package_data['from'] + package_data_to = package_data['to'] + vulnerabilities_from = package_data_from['vulnerabilities'] + vulnerabilities_to = package_data_to['vulnerabilities'] + + states_vulnerabilities_expected = vulnerabilities_to + states_vulnerabilities_not_expected = vulnerabilities_from + + # Alerts from previous package should be mitigated + + + else: + states_vulnerabilities_expected = package_data['vulnerabilities'] + states_vulnerabilities_not_expected = [] + + + + + + # Get all the alerts generated in the 
timestamp + vulnerability_alerts = get_vulnerabilities_alerts_indexer(host_manager, host, current_datetime) + vulnerability_alerts_mitigated = get_vulnerabilities_alerts_indexer(host_manager, host, current_datetime, True) + + vulnerability_index = get_indexer_values(host_manager, index='wazuh-vulnerability-detector', + greater_than_timestamp=current_datetime)['hits']['hits'] + + results['checks']['alerts_found'] = vulnerability_alerts + results['checks']['states_found'] = vulnerability_index + + # Check unexpected alerts. For installation/removel non vulnerable package + if check_data['no_alerts']: + logging.critical(f'Checking unexpected vulnerability alerts in the indexer for {host}') + results['evidences']["alerts_found_unexpected"] = { + "mitigated": vulnerability_alerts_mitigated, + "vulnerabilities": vulnerability_alerts + } + if len(results['evidences']['alerts_found_unexpected'].get('mitigated', [])) > 0 or \ + len(results['evidences']['alerts_found_unexpected'].get('vulnerabilities', [])) > 0: + results['checks']['all_successfull'] = False - with open(packages_filepath, 'r') as packages_file: - packages_data = json.load(packages_file) + # Check expected alerts + elif check_data['alerts']: + logging.critical(f'Checking vulnerability alerts for {host}') + if operation == 'update' or operation == 'remove': + evidence_key = "alerts_not_found_from" if operation == 'update' else "alerts_not_found" + package_data_to_use = package_data['from'] if operation == 'update' else package_data + # Check alerts from previous package are mitigated + results['evidences'][evidence_key] = check_vuln_alert_indexer(vulnerability_alerts_mitigated, + host, + package_data_to_use, + current_datetime) + elif operation == 'install' or operation == 'update': + # Check alerts from new package are found + evidence_key = "alerts_not_found_to" if operation == 'update' else "alerts_not_found" + package_data_to_use = package_data['to'] if operation == 'update' else package_data + 
results['evidences'][evidence_key] = check_vuln_alert_indexer(vulnerability_alerts, + host, + package_data_to_use, + current_datetime) + + if len(results['evidences'].get('alerts_not_found_from', [])) > 0 or \ + len(results['evidences'].get('alerts_not_found_to', [])) > 0 or \ + len(results['evidences'].get('alerts_not_found', [])) > 0: + results['checks']['all_successfull'] = False + + # Check unexpected states + if check_data['no_indices']: + logging.critical(f'Checking vulnerability state index for {host}') + results['evidences']["states_found_unexpected"] = vulnerability_index + + if len(results['evidences']['states_found_unexpected']) > 0: + results['checks']['all_successfull'] = False - return packages_data + elif check_data['state_index']: + if operation == 'update' or operation == 'remove': + evidence_key = 'states_found_unexpected_from' if operation == 'update' else 'states_found_unexpected' + package_data_to_use = package_data['from'] if operation == 'update' else package_data + # Check states from previous package are mitigated + results['evidences'][evidence_key] = check_vuln_state_index(host_manager, host, package_data_to_use, + current_datetime) + if len(results['evidences'][evidence_key]) != len(package_data_to_use['vulnerabilities']): + results['checks']['all_successfull'] = False + + elif operation == 'install' or operation == 'update': + # Check states from new package are found + evidence_key = 'states_not_found_to' if operation == 'update' else 'states_not_found' + package_data_to_use = package_data['to'] if operation == 'update' else package_data + results['evidences'][evidence_key] = check_vuln_state_index(host_manager, host, package_data_to_use, + current_datetime) + + if len(results['evidences'][evidence_key]) != 0: + results['checks']['all_successfull'] = False def install_package(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): @@ -62,10 +174,24 @@ def install_package(host: str, operation_data: Dict[str, Dict], 
host_manager: Ho Raises: ValueError: If the specified operation is not recognized. """ - logging.critical(f"Installing package on {host}") + results = { + 'evidences': { + "alerts_not_found": [], + "states_not_found": [], + "alerts_found": [], + "states_found": [], + "alerts_found_unexpected": [], + "states_found_unexpected": [] + }, + 'checks': {} + } + + logging.info(f"Installing package on {host}") + host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(host)['architecture'] system = host_manager.get_host_variables(host)['os_name'] + if system == 'linux': system = host_manager.get_host_variables(host)['os'].split('_')[0] @@ -81,67 +207,29 @@ def install_package(host: str, operation_data: Dict[str, Dict], host_manager: Ho package_data = load_packages_metadata()[package_id] package_url = package_data['urls'][host_os_name][host_os_arch] - logging.critical(f"Installing package on {host}") - logging.critical(f"Package URL: {package_url}") + logging.info(f"Installing package on {host}") + logging.info(f"Package URL: {package_url}") current_datetime = datetime.utcnow().isoformat() - host_manager.install_package(host, package_url, system) - logging.critical(f"Package {package_url} installed on {host}") - time.sleep(200) - - logging.critical(f"Package installed on {host}") - results = { - 'evidences': { - "alerts_not_found": [], - "states_not_found": [] - }, - 'checks': {} - } - - if 'check' in operation_data and (operation_data['check']['alerts'] or operation_data['check']['state_index']): - logging.critical(f"Waiting for syscollector scan to finish on {host}") - TIMEOUT_SYSCOLLECTOR_SCAN = 80 - truncate_remote_host_group_files(host_manager, 'agent', 'logs') - - # Wait until syscollector - monitoring_data = generate_monitoring_logs(host_manager, - [get_event_regex({'event': 'syscollector_scan_start'}), - get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, 
TIMEOUT_SYSCOLLECTOR_SCAN], - host_manager.get_group_hosts('agent')) - - result = monitoring_events_multihost(host_manager, monitoring_data) - - logging.critical(f"Syscollector scan finished with result: {result}") - - truncate_remote_host_group_files(host_manager, 'manager', 'logs') + host_manager.install_package(host, package_url, system) - logging.critical(f"Waiting for vulnerability scan to finish on {host}") + logging.info(f"Package {package_url} installed on {host}") - wait_until_vuln_scan_agents_finished(host_manager) + logging.info(f"Package installed on {host}") - logging.critical(f"Checking agent vulnerability on {host}") + results['checks']['all_successfull'] = True - if 'check' in operation_data: - if operation_data['check']['alerts']: - logging.critical(f'Checking vulnerability alerts in the indexer for {host}') - results['evidences']["alerts_not_found"] = check_vuln_alert_indexer(host_manager, host, package_data, - current_datetime) + wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or + operation_data['check']['state_index'] or + operation_data['check']['no_alerts'] or + operation_data['check']['no_indices']) - if operation_data['check']['state_index']: - logging.critical(f'Checking vulnerability state index for {host}') - results['results']["states_not_found"] = check_vuln_state_index(host_manager, host, package_data, - current_datetime) + if wait_is_required: + wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) - logging.critical(f"Results: {results}") - - if results['alerts_not_found'] or results['states_not_found']: - results['checks']['all_successfull'] = False - else: - results['checks']['all_successfull'] = True - else: - results['checks']['all_successfull'] = True + check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, + package_data, operation='install') return { f"{host}": results @@ -161,6 +249,17 @@ def remove_package(host: 
str, operation_data: Dict[str, Dict], host_manager: Hos ValueError: If the specified operation is not recognized. """ logging.critical(f"Removing package on {host}") + results = { + 'evidences': { + "alerts_not_found": [], + "states_not_found": [], + "alerts_found": [], + "states_found": [], + "alerts_found_unexpected": [], + "states_found_unexpected": [] + }, + 'checks': {} + } host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(host)['architecture'] system = host_manager.get_host_variables(host)['os_name'] @@ -178,70 +277,30 @@ def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos package_data = load_packages_metadata()[package_id] - logging.critical(f"Removing package on {host}") - uninstall_name = package_data['uninstall_name'] - current_datetime = datetime.utcnow().isoformat() - host_manager.remove_package(host, uninstall_name, system) - - if operation_data['check']['alerts'] or operation_data['check']['state_index']: - logging.critical(f"Waiting for syscollector scan to finish on {host}") - TIMEOUT_SYSCOLLECTOR_SCAN = 80 - truncate_remote_host_group_files(host_manager, 'agent', 'logs') - - # Wait until syscollector - monitoring_data = generate_monitoring_logs(host_manager, - [get_event_regex({'event': 'syscollector_scan_start'}), - get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], - host_manager.get_group_hosts('agent')) - result = monitoring_events_multihost(host_manager, monitoring_data) - - logging.critical(f"Syscollector scan finished with result: {result}") - - truncate_remote_host_group_files(host_manager, 'manager', 'logs') + logging.critical(f"Removing package on {host}") + if 'uninstall_name' in package_data: + uninstall_name = package_data['uninstall_name'] + host_manager.remove_package(host, system, package_uninstall_name=uninstall_name) + elif 'uninstall_custom_playbook' in package_data: + 
host_manager.remove_package(host, system, custom_uninstall_playbook=package_data['uninstall_custom_playbook']) - logging.critical(f"Waiting for vulnerability scan to finish on {host}") + wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or + operation_data['check']['state_index'] or + operation_data['check']['no_alerts'] or + operation_data['check']['no_indices']) - wait_until_vuln_scan_agents_finished(host_manager) + if wait_is_required: + wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) - logging.critical(f"Checking agent vulnerability on {host}") + check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, + package_data, operation='remove') - results = { - 'evidences': { - "alerts_not_found": [], - "states_found": [] - }, - 'checks': {} + return { + f"{host}": results } - logging.critical("Operation data is: {}".format(package_data)) - - if 'check' in operation_data: - if operation_data['check']['alerts'] or operation_data['check']['states']: - if operation_data['check']['alerts']: - logging.critical(f'Checking vulnerability alerts in the indexer for {host}') - results["evidences"]["alerts_not_found"] = check_vuln_alert_indexer(host_manager, host, package_data, - current_datetime, - vuln_mitigated=True) - - if operation_data['check']['state_index']: - logging.critical(f'Checking vulnerability state index for {host}') - states_not_found = check_vuln_state_index(host_manager, host, package_data, - current_datetime, return_found=True) - - results['evidences']["states_found"] = states_not_found - - if results['evidences']['alerts_not_found'] or len(results['evidences']['states_found']) > 0: - results['checks']['all_successfull'] = False - else: - results['checks']['all_successfull'] = True - - return { - f"{host}": results - } - def update_package(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): """ @@ -256,6 +315,17 @@ def 
update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos ValueError: If the specified operation is not recognized. """ logging.critical(f"Updating package on {host}") + results = { + 'evidences': { + "alerts_not_found_from": [], + 'alerts_found_from': [], + "alerts_found": [], + "states_found": [], + "alerts_found_unexpected": [], + "states_found_unexpected": [] + }, + 'checks': {} + } host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(host)['architecture'] @@ -284,7 +354,6 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos package_data_from = load_packages_metadata()[package_id_from] package_data_to = load_packages_metadata()[package_id_to] - package_url_from = package_data_from['urls'][host_os_name][host_os_arch] package_url_to = package_data_to['urls'][host_os_name][host_os_arch] logging.critical(f"Installing package on {host}") @@ -294,77 +363,18 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos host_manager.install_package(host, package_url_to, system) logging.critical(f"Package {package_url_to} installed on {host}") - time.sleep(200) logging.critical(f"Package installed on {host}") - if operation_data['check']['alerts'] or operation_data['check']['state_index']: - logging.critical(f"Waiting for syscollector scan to finish on {host}") - TIMEOUT_SYSCOLLECTOR_SCAN = 80 - truncate_remote_host_group_files(host_manager, 'agent', 'logs') - - # Wait until syscollector - monitoring_data = generate_monitoring_logs(host_manager, - [get_event_regex({'event': 'syscollector_scan_start'}), - get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], - host_manager.get_group_hosts('agent')) - - result = monitoring_events_multihost(host_manager, monitoring_data) - - logging.critical(f"Syscollector scan finished with result: {result}") - - 
truncate_remote_host_group_files(host_manager, 'manager', 'logs') - - logging.critical(f"Waiting for vulnerability scan to finish on {host}") - - wait_until_vuln_scan_agents_finished(host_manager) - - logging.critical(f"Checking agent vulnerability on {host}") - - results = { - 'evidences': { - "alerts_not_found_from": [], - "states_found_from": [], - "alerts_not_found_to": [], - "states_not_found_to": [], - }, - 'checks': {} - } - - if 'check' in operation_data: - if operation_data['check']['alerts']: - logging.critical(f'Checking vulnerability alerts in the indexer for {host}. Expected CVE mitigation') - results["evidences"]["alerts_not_found_from"] = check_vuln_alert_indexer(host_manager, host, package_data_from, - current_datetime, - vuln_mitigated=True) - - if operation_data['check']['state_index']: - logging.critical(f'Checking vulnerability state index for {host}') - states_not_found = check_vuln_state_index(host_manager, host, package_data_from, - current_datetime, return_found=True) - results['evidences']["states_found_from"] = states_not_found - - logging.critical(f'Checking vulnerability alerts in the indexer for {host}. 
Expected CVE vuln of new package version') - - if operation_data['check']['alerts']: - logging.critical(f'Checking vulnerability alerts in the indexer for {host}') - results["alerts_not_found_to"] = check_vuln_alert_indexer(host_manager, host, package_data_to, - current_datetime) - - if operation_data['check']['state_index']: - logging.critical(f'Checking vulnerability state index for {host}') - results["states_not_found_to"] = check_vuln_state_index(host_manager, host, package_data_to, - current_datetime) - - logging.critical(f"Results: {results}") - - if results['evidences']['alerts_not_found_from'] or len(results['evidences']['states_found_from']) > 0 or \ - results['evidences']['alerts_not_found_to'] or results['evidences']['states_not_found_to']: - results['checks']['all_successfull'] = False - else: - results['checks']['all_successfull'] = True + wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or + operation_data['check']['state_index'] or + operation_data['check']['no_alerts'] or + operation_data['check']['no_indices']) + if wait_is_required: + wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) + check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, + package_data_from, operation='update') return { f"{host}": results } @@ -414,10 +424,8 @@ def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager, results[target]['checks']['all_successfull'] = False def launch_and_store_result(args): - logging.info("Launching remote operation on host: {}".format(args[0])) host, task, manager = args result = launch_remote_operation(host, task, manager) - logging.info("FINAL Result of remote operation on host {}: {}".format(host, result)) results.update(result) with ThreadPoolExecutor() as executor: @@ -438,4 +446,5 @@ def launch_and_store_result(args): future.result() logging.info("Results in parallel operations: {}".format(results)) + 
return results diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py index 61709b207b..09549145d0 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py @@ -1,33 +1,90 @@ -from wazuh_testing.tools.system import HostManager -from wazuh_testing.end_to_end.indexer_api import get_indexer_values +""" +Vulnerability detector module. +------------------------- + -from typing import Dict +Functions: + - load_packages_metadata: Load packages metadata from the packages.json file. + - check_vuln_state_index: Check vulnerability state index for a host. + + +Copyright (C) 2015, Wazuh Inc. +Created by Wazuh, Inc. . +This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 +""" import logging import re +import os +import json +from typing import Dict, List + +from wazuh_testing.tools.system import HostManager +from wazuh_testing.end_to_end.indexer_api import get_indexer_values + + +def load_packages_metadata() -> Dict: + """ + Load packages metadata from the packages.json file. + + Returns: + dict: Dictionary containing the packages metadata. 
+ + Example of packages.json: + "vlc-3.0.11": { + "package_name": "VLC media player", + "package_version": "3.0.11", + "CVE": [ + "CVE-2021-25804", + "CVE-2021-25803", + "CVE-2021-25802", + "CVE-2021-25801", + "CVE-2020-26664" + ], + "url": { + "windows": { + "amd64": "https://get.videolan.org/vlc/3.0.11/win64/vlc-3.0.11-win64.exe" + } + }, + "uninstall_custom_playbook": "remove_vlc_win.yml" + }, + """ + packages_filepath = os.path.join(os.path.dirname(__file__), + 'vulnerability_detector_packages', 'vuln_packages.json') + + with open(packages_filepath, 'r') as packages_file: + packages_data = json.load(packages_file) + + return packages_data def check_vuln_state_index(host_manager: HostManager, host: str, package: Dict[str, Dict], - current_datetime: str = "", return_found: bool = False): + current_datetime: str = "") -> List: """ - Check vulnerability state index for a host. + Check vulnerability state index for a host. This function checks if the vulnerability state index contains the + expected vulnerabilities for a host. It returns a dictionary containing the expected alerts not found. Args: host_manager (HostManager): An instance of the HostManager class containing information about hosts. - vulnerability_data (dict): Dictionary containing vulnerability data. - - ToDo: - Implement the functionality. + host (str): Host name. + package (dict): Dictionary containing package data. + current_datetime (str): Datetime to filter the vulnerability state index. 
""" index_vuln_state_content = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', greater_than_timestamp=current_datetime)['hits']['hits'] expected_alerts_not_found = [] - expected_alerts_found = [] - logging.critical(f"Checking vulnerability state index {package}") + logging.info(f"Checking vulnerability state index {package}") vulnerabilities = package['CVE'] for vulnerability in vulnerabilities: found = False + vulnerability_case = { + 'agent': host, + 'cve': vulnerability, + 'package_name': package['package_name'], + 'package_version': package['package_version'] + } + for indice_vuln in index_vuln_state_content: state_agent = indice_vuln['_source']['agent']['name'] state_cve = indice_vuln["_source"]['vulnerability']['id'] @@ -38,21 +95,15 @@ def check_vuln_state_index(host_manager: HostManager, host: str, package: Dict[s and state_package_name == package['package_name'] and \ state_package_version == package['package_version']: found = True - expected_alerts_found.append(vulnerability) + break if not found: - expected_alerts_not_found.append(vulnerability) - - logging.critical(f"Expected alerts not found: {expected_alerts_not_found}") - logging.critical(f"Triggered alerts: {index_vuln_state_content}") + expected_alerts_not_found.append(vulnerability_case) - if return_found: - return expected_alerts_found - else: - return expected_alerts_not_found + return expected_alerts_not_found -def get_alerts_by_agent(alerts, regex): +def get_alerts_by_agent(alerts, regex) -> Dict: """ Get specific alerts by agent. @@ -73,11 +124,18 @@ def get_alerts_by_agent(alerts, regex): alerts_vuln_by_agent[agent] = [] alerts_vuln_by_agent[agent].append(alert) - logging.critical(f"Alerts by agent: {alerts_vuln_by_agent}") return alerts_vuln_by_agent -def get_indexed_vulnerabilities_by_agent(indexed_vulnerabilities): +def get_indexed_vulnerabilities_by_agent(indexed_vulnerabilities) -> Dict: + """Get indexed vulnerabilities by agent. 
+ + Args: + indexed_vulnerabilities (dict): Dictionary containing the indexed vulnerabilities. + + Returns: + dict: Dictionary containing the indexed vulnerabilities by agent. + """ vulnerabilities_by_agent = {} for vulnerabilities_state in indexed_vulnerabilities['hits']['hits']: if 'agent' in vulnerabilities_state['_source']: @@ -90,39 +148,55 @@ def get_indexed_vulnerabilities_by_agent(indexed_vulnerabilities): return vulnerabilities_by_agent -def check_vuln_alert_indexer(host_manager: HostManager, host: str, package: Dict[str, Dict], - current_datetime: str = '', vuln_mitigated: bool = False): +def get_vulnerabilities_alerts_indexer(host_manager: HostManager, host: str, greater_than_timestamp: str = "", + vuln_mitigated=False) -> Dict: + """Get vulnerabilities alerts by agent. + + Args: + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + host (str): Host name. + greater_than_timestamp (str): Datetime to filter the vulnerability state index. + vuln_mitigated (bool): Indicates if the vulnerability is mitigated. + + Returns: + dict: Dictionary containing the indexed vulnerabilities by agent. + """ + + indexer_alerts = get_indexer_values(host_manager, greater_than_timestamp=greater_than_timestamp)['hits']['hits'] + + regex_to_match = "CVE.* affects .*" if not vuln_mitigated else \ + "The .* that affected .* was solved due to a package removal" + + return get_alerts_by_agent(indexer_alerts, regex_to_match) + + +def check_vuln_alert_indexer(vulnerabilities_alerts: Dict, host: str, package: Dict[str, Dict], + current_datetime: str = '') -> List: """ Check vulnerability alerts in the indexer for a host. Args: - host_manager (HostManager): An instance of the HostManager class containing information about hosts. + vulnerabilities_alerts (Dict): Dictionary containing the indexed vulnerabilities by agent. + host (str): Host name. + package (dict): Dictionary containing package data. 
+ vuln_mitigated (bool): Indicates if the vulnerability is mitigated. vulnerability_data (dict): Dictionary containing vulnerability data. Returns: list: List of vulnerability alerts. """ - logging.critical(f"Checking vulnerability alerts in the indexer {package}") - - regex_to_match = "CVE.* affects .*" - if vuln_mitigated: - regex_to_match = "The .* that affected .* was solved due to a package removal" + logging.info(f"Checking vulnerability alerts in the indexer {package}") - indexer_alerts = get_indexer_values(host_manager, greater_than_timestamp=current_datetime)['hits']['hits'] # Get CVE affects alerts for all agents - alerts_global = get_alerts_by_agent(indexer_alerts, regex_to_match) - - if host in alerts_global: - triggered_alerts = alerts_global[host] + if host in vulnerabilities_alerts: + triggered_alerts = vulnerabilities_alerts[host] else: triggered_alerts = [] - logging.critical(f"Triggered alerts: {triggered_alerts}") - expected_alerts_not_found = [] for cve in package['CVE']: - logging.critical(f"Checking vulnerability: {cve}") + logging.info(f"Checking vulnerability: {cve}") package_name = package['package_name'] package_version = package['package_version'] @@ -138,13 +212,11 @@ def check_vuln_alert_indexer(host_manager: HostManager, host: str, package: Dict if alert_cve == cve and alert_package_name == package_name and \ alert_package_version == package_version: found = True + break if not found: - print(f"Vulnerability not found: {cve} for package {package} {package_version}") + logging.info(f"Vulnerability not found: {cve} for package {package} {package_version}") expected_alerts_not_found.append({'CVE': cve, 'PACKAGE_NAME': package_name, 'PACKAGE_VERSION': package_version}) - logging.critical(f"Expected alerts not found: {expected_alerts_not_found}") - logging.critical(f"Triggered alerts: {triggered_alerts}") - return expected_alerts_not_found diff --git a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py 
b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py index c7c919d0a9..c0d25e25cf 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py @@ -1 +1,2 @@ TIMEOUT_SYSCOLLECTOR_SCAN = 360 +TIMEOUT_SYSCOLLECTOR_SHORT_SCAN = 60 diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index ef655099ff..b827ae2a7d 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -791,15 +791,12 @@ def get_results(self): #def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager): @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) - def test_vulnerability_detector_scans_cases(self, preconditions, body, teardown, setup, + def test_vulnerability_detector_scans_cases(self, setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager, get_results): - setup_results = setup results = get_results results['setup'] = setup_results - import pdb; pdb.set_trace() - hosts_to_ignore = [] for host in setup_results.keys(): From aaf636bd4532acbe65a4aefbde67d7843367e838 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Wed, 31 Jan 2024 20:02:33 +0000 Subject: [PATCH 138/174] fix: include gathering evidences in test vd E2E --- .../test_vulnerability_detector.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index b827ae2a7d..f5907cfab5 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ 
b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -789,10 +789,11 @@ class TestScanSyscollectorCases(): def get_results(self): return self.results - #def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager): @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) - def test_vulnerability_detector_scans_cases(self, setup_vulnerability_tests, preconditions, body, teardown, setup, + def test_vulnerability_detector_scans_cases(self, request, setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager, get_results): + test_name = request.node.name + setup_results = setup results = get_results results['setup'] = setup_results @@ -812,14 +813,16 @@ def test_vulnerability_detector_scans_cases(self, setup_vulnerability_tests, pre logger.critical(f"Case Info: {body}") # Launch tests tasks - result = launch_parallel_operations(body['tasks'], host_manager, hosts_to_ignore) + test_result = launch_parallel_operations(body['tasks'], host_manager, hosts_to_ignore) success_for_all_agents = True - for host in result.keys(): - if result[host]['checks']['all_successfull'] is False: + for host in test_result.keys(): + if test_result[host]['checks']['all_successfull'] is False: success_for_all_agents = False logger.critical(f"Test failed for host {host}. Check logs for more information") - logger.critical(f"Evidences: {result[host]['evidences']}") + logger.critical(f"Evidences: {test_result[host]['evidences']}") + + results[test_name] = test_result assert success_for_all_agents is True, "Test failed. 
Check logs for more information" From b36f5a842bbd3f7c32b2fb7d5775b8c167a9021a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 1 Feb 2024 16:35:02 +0000 Subject: [PATCH 139/174] fix: monitoring module --- .../wazuh_testing/end_to_end/monitoring.py | 36 +++++++++---------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 9581511b99..5c3a309d7c 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -86,13 +86,22 @@ def filter_events_by_timestamp(match_events: List) -> List: List: A list of events that fit the timestamp. """ match_that_fit_timestamp = [] + logging.critical(f"match_events: {match_events}") for match in match_events: - if len(match.groups()) > 1: - timestamp_str = match.groups()[0] - timestamp_format = "%Y/%m/%d %H:%M:%S" - timestamp_datetime = datetime.strptime(timestamp_str, timestamp_format) - if timestamp_datetime >= greater_than_timestamp: - match_that_fit_timestamp.append(match) + if match.__class__ == tuple: + timestamp_str = match[0] + else: + timestamp_str = match + + timestamp_format = "%Y/%m/%d %H:%M:%S" + timestamp_format_parameter = "%Y-%m-%dT%H:%M:%S.%f" + + timestamp_datetime = datetime.strptime(timestamp_str, timestamp_format) + greater_than_timestamp_formatted = datetime.strptime(greater_than_timestamp, timestamp_format_parameter) + + logging.critical(f"Comparing {timestamp_datetime} {greater_than_timestamp_formatted} ") + if timestamp_datetime >= greater_than_timestamp_formatted: + match_that_fit_timestamp.append(match) return match_that_fit_timestamp @@ -109,9 +118,7 @@ def filter_events_by_timestamp(match_events: List) -> List: while current_timeout < timeout: file_content = host_manager.get_file_content(host, monitoring_file) - match_regex = re.findall(regex, file_content) - 
if greater_than_timestamp: match_that_fit_timestamp = filter_events_by_timestamp(match_regex) else: @@ -180,7 +187,7 @@ def generate_monitoring_logs(host_manager: HostManager, regex_list: List[str], t { "agent1":[ { - "regex":"INFO: Action for 'vulnerability_feed_manager' finished", + "regex":["INFO: Action for 'vulnerability_feed_manager' finished"], "file":"/var/ossec/logs/ossec.log", "timeout":1000, "n_iterations":1, @@ -191,17 +198,6 @@ def generate_monitoring_logs(host_manager: HostManager, regex_list: List[str], t """ monitoring_data = {} - if len(regex_list) == 1: - logging.info("Using the same regex for all hosts") - regex_list = regex_list * len(hosts) - elif len(regex_list) != len(hosts): - raise ValueError("The number of regexes must be equal to the number of hosts") - - if len(timeout_list) == 1: - logging.info("Using the same timeout for all hosts") - timeout_list = timeout_list * len(hosts) - elif len(timeout_list) != len(hosts): - raise ValueError("The number of timeouts must be equal to the number of hosts") for host in hosts: monitoring_data[host] = [] From e1aa3854b8edf996cf9d70732bdecf8c4445bec2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 1 Feb 2024 16:36:33 +0000 Subject: [PATCH 140/174] fix: remote operations handler module --- .../end_to_end/remote_operations_handler.py | 40 +++++-------------- 1 file changed, 9 insertions(+), 31 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 302f94548a..75b0331e4f 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -47,8 +47,9 @@ def wait_syscollector_and_vuln_scan(host_manager: HostManager, host: str, opera [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], 
[timeout_syscollector_scan, timeout_syscollector_scan], - host_manager.get_group_hosts('agent'), - greater_than_timestamp=current_datetime) + host_manager.get_group_hosts('agent')) + + truncate_remote_host_group_files(host_manager, host_manager.get_group_hosts('agent')) monitoring_events_multihost(host_manager, monitoring_data) @@ -63,42 +64,19 @@ def check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime host: str, package_data: Dict, operation: str = 'install') -> None: - - # In case of update, we need to not expect vulnerabilities from previous package and expect vulnerabilities from - # new package - - if update: - package_data_from = package_data['from'] - package_data_to = package_data['to'] - vulnerabilities_from = package_data_from['vulnerabilities'] - vulnerabilities_to = package_data_to['vulnerabilities'] - - states_vulnerabilities_expected = vulnerabilities_to - states_vulnerabilities_not_expected = vulnerabilities_from - - # Alerts from previous package should be mitigated - - - else: - states_vulnerabilities_expected = package_data['vulnerabilities'] - states_vulnerabilities_not_expected = [] - - - - - # Get all the alerts generated in the timestamp vulnerability_alerts = get_vulnerabilities_alerts_indexer(host_manager, host, current_datetime) vulnerability_alerts_mitigated = get_vulnerabilities_alerts_indexer(host_manager, host, current_datetime, True) - vulnerability_index = get_indexer_values(host_manager, index='wazuh-vulnerability-detector', + vulnerability_index = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', greater_than_timestamp=current_datetime)['hits']['hits'] - results['checks']['alerts_found'] = vulnerability_alerts - results['checks']['states_found'] = vulnerability_index + results['evidences']['all_alerts_found'] = vulnerability_alerts + results['evidences']['all_alerts_found_mitigated'] = vulnerability_alerts_mitigated + results['evidences']['all_states_found'] = vulnerability_index # Check 
unexpected alerts. For installation/removel non vulnerable package - if check_data['no_alerts']: + if 'no_alerts' in check_data and check_data['no_alerts']: logging.critical(f'Checking unexpected vulnerability alerts in the indexer for {host}') results['evidences']["alerts_found_unexpected"] = { "mitigated": vulnerability_alerts_mitigated, @@ -134,7 +112,7 @@ def check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime results['checks']['all_successfull'] = False # Check unexpected states - if check_data['no_indices']: + if 'no_indices' in check_data and check_data['no_indices']: logging.critical(f'Checking vulnerability state index for {host}') results['evidences']["states_found_unexpected"] = vulnerability_index From 459261180673a94b4e5c8db0a05512f22b81623b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 1 Feb 2024 16:37:19 +0000 Subject: [PATCH 141/174] feat: increase syscollector scan timeout --- .../wazuh_testing/modules/syscollector/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py index c0d25e25cf..7b28039669 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/modules/syscollector/__init__.py @@ -1,2 +1,2 @@ TIMEOUT_SYSCOLLECTOR_SCAN = 360 -TIMEOUT_SYSCOLLECTOR_SHORT_SCAN = 60 +TIMEOUT_SYSCOLLECTOR_SHORT_SCAN = 90 From d850cc0dd0c4152ed8f8cbe3126055cb5eefdc4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 1 Feb 2024 16:39:30 +0000 Subject: [PATCH 142/174] fix: minor errors in test VD E2E --- .../test_vulnerability_detector/conftest.py | 58 +++++++------ .../test_vulnerability_detector.py | 87 +++++++++++++------ 2 files changed, 92 insertions(+), 53 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py 
b/tests/end_to_end/test_vulnerability_detector/conftest.py index aacb537e7f..a72201d900 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -296,33 +296,37 @@ def pytest_runtest_makereport(item, call): arguments[key] = str(value) extra.append(pytest_html.extras.json(arguments, name="Test arguments")) - # Extra files to be added in 'Links' section - logs_path = os.path.join(os.path.dirname(item._request.node.path), 'logs', item._request.node.name) - - if 'host_manager' in item.funcargs: - collect_e2e_environment_data(item._request.node.name, item.funcargs['host_manager']) - - if 'get_results' in item.funcargs: - test_result = item.funcargs['get_results'] - - if item._request.node.name in test_result and 'evidences' in test_result[item._request.node.name]: - evidences = test_result[item._request.node.name]['evidences'] - collect_evidences(item._request.node.name, evidences) - else: - logging.info(f"No evidences found for {item._request.node.name}") - - files = [] - - if os.path.exists(logs_path): - files = [f for f in os.listdir(logs_path) if - os.path.isfile(os.path.join(logs_path, f))] - - for filepath in files: - fullpath = os.path.join(logs_path, filepath) - if os.path.isfile(fullpath): - with open(fullpath, mode='r', errors='replace') as f: - content = f.read() - extra.append(pytest_html.extras.text(content, name=os.path.split(filepath)[-1])) + try: + current_dir = os.path.dirname(__file__) + vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") + logs_path = os.path.join(str(vulnerability_detector_logs_dir), item._request.node.name) + + if 'host_manager' in item.funcargs: + collect_e2e_environment_data(item._request.node.name, item.funcargs['host_manager']) + + if 'get_results' in item.funcargs: + test_result = item.funcargs['get_results'] + + if item._request.node.name in test_result and 'evidences' in test_result[item._request.node.name]: + evidences = 
test_result[item._request.node.name]['evidences'] + collect_evidences(item._request.node.name, evidences) + else: + logging.info(f"No evidences found for {item._request.node.name}") + + files = [] + + if os.path.exists(logs_path): + files = [f for f in os.listdir(logs_path) if + os.path.isfile(os.path.join(logs_path, f))] + + for filepath in files: + fullpath = os.path.join(logs_path, filepath) + if os.path.isfile(fullpath): + with open(fullpath, mode='r', errors='replace') as f: + content = f.read() + extra.append(pytest_html.extras.text(content, name=os.path.split(filepath)[-1])) + except Exception as e: + logging.critical(f"Error collecting evidences: {e} for {item._request.node.name}") # if not report.passed and not report.skipped: report.extra = extra diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index f5907cfab5..aa3415e32d 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -303,6 +303,8 @@ def test_syscollector_first_scan(self, request, host_manager, setup_vulnerabilit else: logger.critical("All agents has been scanned") + logger.critical(results) + def test_syscollector_first_scan_alerts(self, request, host_manager, setup_vulnerability_tests, get_results): """ description: Validates that the Vulnerability Detector detects vulnerabilities within the environment in the @@ -339,6 +341,7 @@ def test_syscollector_first_scan_alerts(self, request, host_manager, setup_vulne }, 'evidences': { 'agents_not_detected_alerts': [], + 'vulnerabilities_alerts_first_scan': [] } } @@ -346,8 +349,10 @@ def test_syscollector_first_scan_alerts(self, request, host_manager, setup_vulne test_name = request.node.name # Filter agents that has not been scanned - agents_to_check = 
results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan'] - if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + agents_to_check = [agent for agent in host_manager.get_group_hosts('agent') if agent not in + results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan']] + + if len(agents_to_check) == 0: pytest.skip("Syscollector scan not started in any agent. Skipping test") # Wait until all agents has been scanned. Timeout: 60 seconds per agent @@ -361,6 +366,8 @@ def test_syscollector_first_scan_alerts(self, request, host_manager, setup_vulne greater_than_timestamp=setup_vulnerability_tests)['hits']['hits'] vuln_alerts_by_agent_first_scan = get_alerts_by_agent(alerts_first_scan, 'CVE.*? affects.*"?') + test_result['evidences']['vulnerabilities_alerts_first_scan'] = vuln_alerts_by_agent_first_scan + # Check that it has been triggered vulnerability detector alerts logger.critical("Checking that all agents has been scanned") for agent in agents_to_check: @@ -376,11 +383,13 @@ def test_syscollector_first_scan_alerts(self, request, host_manager, setup_vulne results['vulnerabilities_alerts_first_scan'] = vuln_alerts_by_agent_first_scan if not test_result['checks']['all_successfull']: - pytest.fail(f"Some agents has not been scanned: {test_result['evidences']['agents_not_scanned_first_scan']}." + pytest.fail(f"Some agents has not been scanned: {test_result['evidences']['agents_not_detected_alerts']}." 
"Check logs for more information") else: logger.critical("All agents has been scanned") + logger.critical(results) + def test_syscollector_first_scan_index(self, request, host_manager, setup_vulnerability_tests, get_results): """ description: Validates that the Vulnerability Detector detects vulnerabilities within the environment in the @@ -419,12 +428,15 @@ def test_syscollector_first_scan_index(self, request, host_manager, setup_vulner }, 'evidences': { 'agents_not_detected_index_vulnerabilities': [], + 'vulnerabilities_index_first_scan': [] } } # Filter agents that has not been scanned - agents_to_check = results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan'] - if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + agents_to_check = [agent for agent in host_manager.get_group_hosts('agent') if agent not in + results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan']] + + if len(agents_to_check) == 0: pytest.skip("Syscollector scan not started in any agent. Skipping test") # Check vulnerabilities in the index @@ -432,10 +444,10 @@ def test_syscollector_first_scan_index(self, request, host_manager, setup_vulner index_state_first_scan = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', greater_than_timestamp=setup_vulnerability_tests) index_vulnerabilities_by_agent_first_scan = get_indexed_vulnerabilities_by_agent(index_state_first_scan) + test_result['evidences']['vulnerabilities_index_first_scan'] = index_vulnerabilities_by_agent_first_scan logger.critical("Checking that all agents has been scanned and generated vulnerabilities in the index") for agent in agents_to_check: - if agent not in list(index_vulnerabilities_by_agent_first_scan.keys()) or \ len(index_vulnerabilities_by_agent_first_scan[agent]) == 0: logger.critical(f"Agent {agent} has not been scanned. 
Continuing with remaining agents") @@ -449,10 +461,12 @@ def test_syscollector_first_scan_index(self, request, host_manager, setup_vulner if not test_result['checks']['all_successfull']: pytest.fail("Some agents has not been scanned and updated states index:" - f"{test_result['evidences']['agents_not_detected_alerts']}.") + f"{test_result['evidences']['agents_not_detected_index_vulnerabilities']}.") else: logger.critical("All agents has been scanned and updated states index") + logger.critical(results) + def tests_syscollector_vulnerabilities_index_alerts_consistency(self, request, setup_vulnerability_tests, get_results): """ @@ -520,6 +534,8 @@ def tests_syscollector_vulnerabilities_index_alerts_consistency(self, request, else: logger.critical("Index state is consistent with the alerts") + logger.critical(results) + def test_syscollector_second_scan(self, request, host_manager, setup_vulnerability_tests, get_results): """ description: Validates the initiation of the second Syscollector scans across all agents in the environment. @@ -560,15 +576,17 @@ def test_syscollector_second_scan(self, request, host_manager, setup_vulnerabili } # Filter agents that has not been scanned - agents_to_check = results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan'] - if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + agents_to_check = [agent for agent in host_manager.get_group_hosts('agent') if agent not in + results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan']] + + if len(agents_to_check) == 0: pytest.skip("Syscollector scan not started in any agent. 
Skipping test") monitoring_data = generate_monitoring_logs(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], - agents_to_check, 2) + host_manager.get_group_hosts('agent'), 2) monitoring_results = monitoring_events_multihost(host_manager, monitoring_data) @@ -590,6 +608,8 @@ def test_syscollector_second_scan(self, request, host_manager, setup_vulnerabili else: logger.critical("Syscollector scan started in all agents") + logger.critical(results) + def tests_syscollector_first_second_scan_consistency_alerts(self, request, host_manager, setup_vulnerability_tests, get_results): """ @@ -635,9 +655,10 @@ def tests_syscollector_first_second_scan_consistency_alerts(self, request, host_ } # Filter agents that has not been scanned - agents_to_check = results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan'] + agents_to_check = [agent for agent in host_manager.get_group_hosts('agent') if agent not in + results['test_syscollector_first_scan']['evidences']['agents_not_scanned_first_scan']] - if len(agents_to_check) == len(host_manager.get_group_hosts('agent')): + if len(agents_to_check) == 0: pytest.skip("Syscollector scan not started in any agent. 
Skipping test") logger.critical("Waiting until agent's VD scan is over") @@ -656,14 +677,9 @@ def tests_syscollector_first_second_scan_consistency_alerts(self, request, host_ alert_present_in_first_scan_not_in_second_scan = [] alert_present_in_second_scan_not_in_first_scan = [] - if len(vuln_alerts_by_agent_second_scan.keys()) != len(results['vulnerabilities_alerts_first_scan'].keys()): - test_result['checks']['all_successfull'] = False - logger.critical(f"First scan: {len(results['vulnerabilities_alerts_first_scan'])}") - logger.critical(f"Second scan: {len(vuln_alerts_by_agent_second_scan)}") - logger.critical("Checking that all agents has been scanned") # Check if the number of agents for each scan is the same - if list(results['vulnerabilities_alerts_first_scan'].keys()) != list(vuln_alerts_by_agent_second_scan.keys()): + if len(vuln_alerts_by_agent_second_scan.keys()) != len(results['vulnerabilities_alerts_first_scan'].keys()): test_result['checks']['all_successfull'] = False logging.critical(f"Agents with vulnerabilities changed between scans: " f"First scan: {list(results['vulnerabilities_alerts_first_scan'].keys())}" @@ -673,15 +689,16 @@ def tests_syscollector_first_second_scan_consistency_alerts(self, request, host_ list(set(list(results['vulnerabilities_alerts_first_scan'].keys())) ^ set(list(results['vulnerabilities_alerts_second_scan'].keys()))) - logger.critical("Checking that all agents has been scanned") - # Check if the number of vulnerabilities for each agent is the same + logger.critical("Checking the number of vulnerabilities for each agent") for agent in agents_to_check: - for alert in list(vuln_alerts_by_agent_second_scan[agent][0]): - alert_present_in_second_scan_not_in_first_scan.append(alert) + if agent in list(results['vulnerabilities_alerts_first_scan'].keys()): + for alert in list(vuln_alerts_by_agent_second_scan[agent][0]): + if alert not in results['vulnerabilities_alerts_first_scan'][agent][0]: + 
alert_present_in_second_scan_not_in_first_scan.append(alert) - for alert in list(results['vulnerabilities_alerts_first_scan'][agent][0]): - if alert in alert_present_in_first_scan_not_in_second_scan: - alert_present_in_first_scan_not_in_second_scan.remove(alert) + for alert in list(results['vulnerabilities_alerts_first_scan'][agent][0]): + if alert not in vuln_alerts_by_agent_second_scan[agent][0]: + alert_present_in_first_scan_not_in_second_scan.append(alert) logger.critical("Checking that all agents has been scanned") if alert_present_in_first_scan_not_in_second_scan or alert_present_in_second_scan_not_in_first_scan: @@ -699,6 +716,8 @@ def tests_syscollector_first_second_scan_consistency_alerts(self, request, host_ else: logger.critical("The number of vulnerabilities is the same between scans") + logger.critical(results) + def tests_syscollector_first_second_scan_consistency_index(self, request, host_manager, setup_vulnerability_tests, get_results): """ @@ -761,6 +780,7 @@ def tests_syscollector_first_second_scan_consistency_index(self, request, host_m else: logger.critical("The number of vulnerabilities is the same between scans") + logger.critical(results) # ------------------------- @@ -790,14 +810,16 @@ def get_results(self): return self.results @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) - def test_vulnerability_detector_scans_cases(self, request, setup_vulnerability_tests, preconditions, body, teardown, setup, + def test_vulnerability_detector_scans_cases(self, request, preconditions, body, teardown, setup, host_manager, get_results): test_name = request.node.name setup_results = setup results = get_results + results[request.node.name] = {} results['setup'] = setup_results + hosts_to_ignore = [] for host in setup_results.keys(): @@ -823,6 +845,19 @@ def test_vulnerability_detector_scans_cases(self, request, setup_vulnerability_t logger.critical(f"Test failed for host {host}. 
Check logs for more information") logger.critical(f"Evidences: {test_result[host]['evidences']}") + results[test_name]['evidences'] = {} + + for agent in test_result.keys(): + if 'evidences' in test_result[agent]: + for evidence, evidence_values in test_result[agent]['evidences'].items(): + results[test_name]['evidences'][str(agent)+str(evidence)] = evidence_values + + if 'evidences' in test_result: + results[test_name]['evidences'] = test_result['evidences'] + results[test_name] = test_result + logger.critical("Final Results") + logger.critical(results) + assert success_for_all_agents is True, "Test failed. Check logs for more information" From fd2ac3ec0ccbc0264de9a04017a4b3d08153750e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 1 Feb 2024 16:41:23 +0000 Subject: [PATCH 143/174] fix: include setup fixture to Test VD E2E --- .../test_vulnerability_detector/test_vulnerability_detector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index aa3415e32d..030dd1cb1f 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -810,7 +810,7 @@ def get_results(self): return self.results @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) - def test_vulnerability_detector_scans_cases(self, request, preconditions, body, teardown, setup, + def test_vulnerability_detector_scans_cases(self, setup_vulnerability_tests, request, preconditions, body, teardown, setup, host_manager, get_results): test_name = request.node.name From 25764f6fc1b528cf7426388a6b487ed5a0a21f6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 1 Feb 2024 16:47:06 +0000 Subject: [PATCH 144/174] fix: remove 
unnecesary prints --- deps/wazuh_testing/wazuh_testing/tools/system.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 343fa55533..a031e84e33 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -484,8 +484,6 @@ def install_package(self, host, url, system='ubuntu'): host_manager.install_package('my_host', 'http://example.com/package.deb', system='ubuntu') """ result = False - print(host) - print(url) if system == 'windows': result = self.get_host(host).ansible("win_package", f"path={url} arguments=/S", check=False) elif system == 'ubuntu': @@ -499,7 +497,6 @@ def install_package(self, host, url, system='ubuntu'): elif system == 'macos': package_name = url.split('/')[-1] result = self.get_host(host).ansible("command", f"curl -LO {url}", check=False) - print(result) cmd = f"installer -pkg {package_name} -target /" result = self.get_host(host).ansible("command", cmd, check=False) @@ -595,7 +592,6 @@ def remove_package(self, host, system, package_uninstall_name=None, custom_unins def run_playbook(self, host, playbook_name, params=None): file_dir = os.path.dirname(os.path.realpath(__file__)) - print(playbook_name) playbook_path = f"{file_dir}/playbooks/{playbook_name}.yaml" new_playbook = None new_playbook_path = None @@ -614,7 +610,6 @@ def run_playbook(self, host, playbook_name, params=None): logger.setLevel(logging.DEBUG) try: - print(host) r = ansible_runner.run( inventory=self.inventory_path, playbook=new_playbook_path, @@ -627,7 +622,6 @@ def run_playbook(self, host, playbook_name, params=None): logger.setLevel(logging.CRITICAL) - print(r) return r From 33d2cfba0dd0b3206cee5845e1d5e3582a3bfdd3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 1 Feb 2024 17:25:04 +0000 Subject: [PATCH 145/174] fix: no key vulnerabilities --- 
.../wazuh_testing/end_to_end/remote_operations_handler.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 75b0331e4f..697edfe213 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -126,7 +126,7 @@ def check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime # Check states from previous package are mitigated results['evidences'][evidence_key] = check_vuln_state_index(host_manager, host, package_data_to_use, current_datetime) - if len(results['evidences'][evidence_key]) != len(package_data_to_use['vulnerabilities']): + if len(results['evidences'][evidence_key]) != len(package_data_to_use['CVE']): results['checks']['all_successfull'] = False elif operation == 'install' or operation == 'update': @@ -332,6 +332,7 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos package_data_from = load_packages_metadata()[package_id_from] package_data_to = load_packages_metadata()[package_id_to] + package_url_to = package_data_to['urls'][host_os_name][host_os_arch] logging.critical(f"Installing package on {host}") @@ -352,7 +353,7 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, - package_data_from, operation='update') + {'from': package_data_from, 'to': package_data_to} , operation='update') return { f"{host}": results } From eff5d46acb3dbfb44d35c9bdc102c142b9e1d4bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 1 Feb 2024 17:25:27 +0000 Subject: [PATCH 146/174] fix: bad formatted 
Packages info --- .../vuln_packages.json | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json index 2d6cfb099b..97123ebf51 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json @@ -72,6 +72,10 @@ "centos": { "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.x86_64.rpm", "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.aarch64.rpm" + }, + "ubuntu": { + "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb", + "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb" } }, "uninstall_name": "grafana*" @@ -94,7 +98,7 @@ "CVE-2022-23552", "CVE-2022-23498" ], - "url": { + "urls": { "centos": { "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.x86_64.rpm", "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.aarch64.rpm" @@ -116,7 +120,7 @@ "CVE-2021-25801", "CVE-2020-26664" ], - "url": { + "urls": { "centos": { "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.x86_64.rpm", "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.aarch64.rpm" @@ -132,7 +136,7 @@ "package_name": "grafana", "package_version": "9.4.17", "CVE": [], - "url": { + "urls": { "centos": { "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.x86_64.rpm", "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.aarch64.rpm" @@ -148,7 +152,7 @@ "package_name": "grafana-enterprise", "package_version": "9.5.13", "CVE": [], - "url": { + "urls": { 
"centos": { "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.x86_64.rpm", "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.aarch64.rpm" @@ -167,7 +171,7 @@ "CVE-2023-4822", "CVE-2023-4399" ], - "url": { + "urls": { "centos": { "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.x86_64.rpm", "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.aarch64.rpm" @@ -218,7 +222,7 @@ "CVE-2019-13602", "CVE-2019-12874" ], - "url": { + "urls": { "windows": { "amd64": "https://get.videolan.org/vlc/3.0.7/win64/vlc-3.0.7-win64.exe" } @@ -242,7 +246,7 @@ "CVE-2019-14437", "CVE-2019-13602" ], - "url": { + "urls": { "windows": { "amd64": "https://get.videolan.org/vlc/3.0.7.1/win64/vlc-3.0.7.1-win64.exe" } @@ -260,7 +264,7 @@ "CVE-2021-25801", "CVE-2020-26664" ], - "url": { + "urls": { "windows": { "amd64": "https://get.videolan.org/vlc/3.0.11/win64/vlc-3.0.11-win64.exe" } @@ -272,7 +276,7 @@ "package_name": "VLC media player", "package_version": "3.0.20", "CVE": [], - "url": { + "urls": { "windows": { "amd64": "https://get.videolan.org/vlc/3.0.20/win64/vlc-3.0.20-win64.exe" } @@ -413,8 +417,7 @@ "macos": { "amd64": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg", "arm64v8": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg" - }, - "windows": "https://nodejs.org/dist/v19.7.0/win-x86/node.exe" + } }, "uninstall_name": "node*" }, @@ -426,7 +429,8 @@ "macos": { "amd64": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg", "arm64v8": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg" - } + }, + "windows": "https://nodejs.org/dist/v19.6.0/win-x86/node.exe" }, "uninstall_name": "node*" }, From 382f42a03b54b347660a471025ea510ac970ffb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Thu, 1 Feb 2024 17:25:55 +0000 Subject: [PATCH 147/174] fix: wrong vd case --- .../test_vulnerability_detector/cases/test_vulnerability.yaml | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 087430308e..7c805ede91 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -406,7 +406,7 @@ arm64v8: node-v20.0.0 teardown: null - case: Installation of a non vulnerable package - id: install_package + id: install_package_non_vulnerable description: | Installation of a non vulnerable package macos: From 2aa05a21778690cfdee8694fc9455ca8f4e5cd0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 09:39:36 +0000 Subject: [PATCH 148/174] fix: wrong pytest version in E2E tests and Vuln JSON included in setup --- deps/wazuh_testing/setup.py | 1 + requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/setup.py b/deps/wazuh_testing/setup.py index f2b2c21098..0c61bd7358 100644 --- a/deps/wazuh_testing/setup.py +++ b/deps/wazuh_testing/setup.py @@ -31,6 +31,7 @@ 'data/qactl_conf_validator_schema.json', 'data/all_disabled_ossec.conf', 'tools/migration_tool/delta_schema.json', + 'end_to_end/vulnerability_detector_packages/vuln_packages.json', 'tools/migration_tool/CVE_JSON_5.0_bundled.json' ] diff --git a/requirements.txt b/requirements.txt index 3d08780964..5215be5bc4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,8 +22,8 @@ py==1.11.0; platform_system == "Windows" and python_version >= "3.11" pycryptodome>=3.9.8 pyOpenSSL==19.1.0 pytest-html==3.1.1 -pytest==6.2.2 ; python_version <= "3.9" -pytest==7.1.2 ; python_version >= "3.10" +pytest==6.2.2 ; python_version < "3.9" +pytest==7.1.2 ; python_version >= "3.9" pyyaml==6.0.1 requests>=2.23.0 scipy>=1.0; platform_system == "Linux" or platform_system == "Darwin" or platform_system=='Windows' 
From f58c810dbe4ba47b3b72f29a37591aad10173f04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:04:23 +0000 Subject: [PATCH 149/174] fix: replace hardcoded 5 by scan interval --- deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index 5c3a309d7c..c4bee14b5f 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -131,7 +131,7 @@ def filter_events_by_timestamp(match_events: List) -> List: sleep(scan_interval) - current_timeout += 5 + current_timeout += scan_interval if not regex_match: elements_not_found.append(element) From 92d3c844c851a869f9f59fe2ab9f522aa9b90971 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:04:56 +0000 Subject: [PATCH 150/174] style: replace loggers level and migrate waiter --- .../end_to_end/remote_operations_handler.py | 60 +++++-------------- 1 file changed, 16 insertions(+), 44 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 697edfe213..3965764a31 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -25,48 +25,20 @@ from datetime import datetime from concurrent.futures import ThreadPoolExecutor -from wazuh_testing.modules.syscollector import TIMEOUT_SYSCOLLECTOR_SHORT_SCAN +from wazuh_testing.end_to_end.waiters import wait_syscollector_and_vuln_scan from wazuh_testing.tools.system import HostManager -from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost -from 
wazuh_testing.end_to_end.waiters import wait_until_vuln_scan_agents_finished -from wazuh_testing.end_to_end.regex import get_event_regex -from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files from wazuh_testing.end_to_end.vulnerability_detector import check_vuln_alert_indexer, check_vuln_state_index, \ load_packages_metadata, get_vulnerabilities_alerts_indexer, get_indexer_values -def wait_syscollector_and_vuln_scan(host_manager: HostManager, host: str, operation_data: Dict, - current_datetime: str = '') -> None: - logging.info(f"Waiting for syscollector scan to finish on {host}") - - timeout_syscollector_scan = TIMEOUT_SYSCOLLECTOR_SHORT_SCAN if 'timeout_syscollector_scan' not in \ - operation_data else operation_data['timeout_syscollector_scan'] - - # Wait until syscollector - monitoring_data = generate_monitoring_logs(host_manager, - [get_event_regex({'event': 'syscollector_scan_start'}), - get_event_regex({'event': 'syscollector_scan_end'})], - [timeout_syscollector_scan, timeout_syscollector_scan], - host_manager.get_group_hosts('agent')) - - truncate_remote_host_group_files(host_manager, host_manager.get_group_hosts('agent')) - - monitoring_events_multihost(host_manager, monitoring_data) - - logging.critical(f"Waiting for vulnerability scan to finish on {host}") - - wait_until_vuln_scan_agents_finished(host_manager) - - logging.critical(f"Checking agent vulnerability on {host}") - - def check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime: str, host_manager: HostManager, host: str, package_data: Dict, operation: str = 'install') -> None: + # Get all the alerts generated in the timestamp - vulnerability_alerts = get_vulnerabilities_alerts_indexer(host_manager, host, current_datetime) - vulnerability_alerts_mitigated = get_vulnerabilities_alerts_indexer(host_manager, host, current_datetime, True) + vulnerability_alerts = get_vulnerabilities_alerts_indexer(host_manager, current_datetime) + 
vulnerability_alerts_mitigated = get_vulnerabilities_alerts_indexer(host_manager, current_datetime, True) vulnerability_index = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', greater_than_timestamp=current_datetime)['hits']['hits'] @@ -75,9 +47,9 @@ def check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime results['evidences']['all_alerts_found_mitigated'] = vulnerability_alerts_mitigated results['evidences']['all_states_found'] = vulnerability_index - # Check unexpected alerts. For installation/removel non vulnerable package + # Check unexpected alerts. For installation/removal non vulnerable package if 'no_alerts' in check_data and check_data['no_alerts']: - logging.critical(f'Checking unexpected vulnerability alerts in the indexer for {host}') + logging.info(f'Checking unexpected vulnerability alerts in the indexer for {host}') results['evidences']["alerts_found_unexpected"] = { "mitigated": vulnerability_alerts_mitigated, "vulnerabilities": vulnerability_alerts @@ -88,7 +60,7 @@ def check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime # Check expected alerts elif check_data['alerts']: - logging.critical(f'Checking vulnerability alerts for {host}') + logging.info(f'Checking vulnerability alerts for {host}') if operation == 'update' or operation == 'remove': evidence_key = "alerts_not_found_from" if operation == 'update' else "alerts_not_found" package_data_to_use = package_data['from'] if operation == 'update' else package_data @@ -113,7 +85,7 @@ def check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime # Check unexpected states if 'no_indices' in check_data and check_data['no_indices']: - logging.critical(f'Checking vulnerability state index for {host}') + logging.info(f'Checking vulnerability state index for {host}') results['evidences']["states_found_unexpected"] = vulnerability_index if len(results['evidences']['states_found_unexpected']) > 0: @@ -226,7 +198,7 @@ 
def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos Raises: ValueError: If the specified operation is not recognized. """ - logging.critical(f"Removing package on {host}") + logging.info(f"Removing package on {host}") results = { 'evidences': { "alerts_not_found": [], @@ -257,7 +229,7 @@ def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos current_datetime = datetime.utcnow().isoformat() - logging.critical(f"Removing package on {host}") + logging.info(f"Removing package on {host}") if 'uninstall_name' in package_data: uninstall_name = package_data['uninstall_name'] host_manager.remove_package(host, system, package_uninstall_name=uninstall_name) @@ -292,7 +264,7 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos Raises: ValueError: If the specified operation is not recognized. """ - logging.critical(f"Updating package on {host}") + logging.info(f"Updating package on {host}") results = { 'evidences': { "alerts_not_found_from": [], @@ -335,15 +307,15 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos package_url_to = package_data_to['urls'][host_os_name][host_os_arch] - logging.critical(f"Installing package on {host}") - logging.critical(f"Package URL: {package_url_to}") + logging.info(f"Installing package on {host}") + logging.info(f"Package URL: {package_url_to}") current_datetime = datetime.utcnow().isoformat() host_manager.install_package(host, package_url_to, system) - logging.critical(f"Package {package_url_to} installed on {host}") + logging.info(f"Package {package_url_to} installed on {host}") - logging.critical(f"Package installed on {host}") + logging.info(f"Package installed on {host}") wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or operation_data['check']['state_index'] or @@ -382,7 +354,7 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man operation = 
operation_data['operation'] if operation in globals(): operation_result = globals()[operation](host, operation_data, host_manager) - logging.critical(f"Operation result: {operation_result}") + logging.info(f"Operation result: {operation_result}") return operation_result else: raise ValueError(f"Operation {operation} not recognized") From 23f453137cdb6f3b9c0f785b770917ed52747a4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:05:32 +0000 Subject: [PATCH 151/174] fix: remove unnused host and update module docstring --- .../wazuh_testing/end_to_end/vulnerability_detector.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py index 09549145d0..af982e1021 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py @@ -6,6 +6,10 @@ Functions: - load_packages_metadata: Load packages metadata from the packages.json file. - check_vuln_state_index: Check vulnerability state index for a host. + - get_alerts_by_agent: Get specific alerts by agent. + - get_indexed_vulnerabilities_by_agent: Get indexed vulnerabilities by agent. + - get_vulnerabilities_alerts_indexer: Get vulnerabilities alerts by agent. + - check_vuln_alert_indexer: Check vulnerability alerts in the indexer for a host. Copyright (C) 2015, Wazuh Inc. @@ -148,7 +152,7 @@ def get_indexed_vulnerabilities_by_agent(indexed_vulnerabilities) -> Dict: return vulnerabilities_by_agent -def get_vulnerabilities_alerts_indexer(host_manager: HostManager, host: str, greater_than_timestamp: str = "", +def get_vulnerabilities_alerts_indexer(host_manager: HostManager, greater_than_timestamp: str = "", vuln_mitigated=False) -> Dict: """Get vulnerabilities alerts by agent. 
From cb04d16fe3ec074a7e50c071479ce51f0e8ebdb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:05:50 +0000 Subject: [PATCH 152/174] refac: move wait_syscollector_and_vuln_scan to waiters --- .../wazuh_testing/end_to_end/waiters.py | 45 +++++++++++++++++-- 1 file changed, 41 insertions(+), 4 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index a51ef59c36..7cd6f8a406 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -8,6 +8,7 @@ Functions: - wait_until_vd_is_updated: Wait until the vulnerability data is updated for all manager hosts. - wait_until_vuln_scan_agents_finished: Wait until vulnerability scans for all agents are finished. + - wait_syscollector_and_vuln_scan: Wait until syscollector and vulnerability scans are finished for a specific host. Constants: - VD_FEED_UPDATE_TIMEOUT: Time in seconds to wait until the vulnerability data is updated for all manager hosts. 
@@ -19,14 +20,16 @@ This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ import time +import logging +from typing import Dict from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost from wazuh_testing.end_to_end.wazuh_api import get_agents_id +from wazuh_testing.end_to_end.regex import get_event_regex +from wazuh_testing.end_to_end.logs import truncate_remote_host_group_files from wazuh_testing.tools.system import HostManager - - -VD_FEED_UPDATE_TIMEOUT = 300 -VD_INITIAL_SCAN_PER_AGENT_TIMEOUT = 15 +from wazuh_testing.modules.syscollector import TIMEOUT_SYSCOLLECTOR_SHORT_SCAN +from wazuh_testing.modules.vulnerability_detector import VD_FEED_UPDATE_TIMEOUT, VD_INITIAL_SCAN_PER_AGENT_TIMEOUT def wait_until_vd_is_updated(host_manager: HostManager) -> None: @@ -51,3 +54,37 @@ def wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: """ final_timeout = VD_INITIAL_SCAN_PER_AGENT_TIMEOUT * len(get_agents_id(host_manager)) time.sleep(final_timeout) + + +def wait_syscollector_and_vuln_scan(host_manager: HostManager, host: str, operation_data: Dict, + current_datetime: str = '') -> None: + """ + Wait until syscollector and vulnerability scans are finished for a specific host. + + Args: + host_manager (HostManager): Host manager instance to handle the environment. + host (str): Host to wait for the scans to finish. + operation_data (Dict): Dictionary with the operation data. + current_datetime (str): Current datetime to use in the operation. 
+ """ + logging.info(f"Waiting for syscollector scan to finish on {host}") + + timeout_syscollector_scan = TIMEOUT_SYSCOLLECTOR_SHORT_SCAN if 'timeout_syscollector_scan' not in \ + operation_data else operation_data['timeout_syscollector_scan'] + + # Wait until syscollector + monitoring_data = generate_monitoring_logs(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [timeout_syscollector_scan, timeout_syscollector_scan], + host_manager.get_group_hosts('agent')) + + truncate_remote_host_group_files(host_manager, host_manager.get_group_hosts('agent')) + + monitoring_events_multihost(host_manager, monitoring_data) + + logging.info(f"Waiting for vulnerability scan to finish on {host}") + + wait_until_vuln_scan_agents_finished(host_manager) + + logging.info(f"Checking agent vulnerability on {host}") From 745718f553ffe8ccf349b55a4c598ca9ba756dec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:06:10 +0000 Subject: [PATCH 153/174] refac: move timeouts to VD module --- .../wazuh_testing/modules/vulnerability_detector/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/__init__.py b/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/__init__.py index 742db507ad..ee3dfc47cf 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/modules/vulnerability_detector/__init__.py @@ -42,6 +42,11 @@ VULNERABILITY_DETECTOR_PREFIX = r'.*wazuh-modulesd:vulnerability-detector.*' + +# End to end tests variables +VD_FEED_UPDATE_TIMEOUT = 300 +VD_INITIAL_SCAN_PER_AGENT_TIMEOUT = 15 + VULNERABLE_PACKAGES = [ { "name": "custom-package-0", From 40c7c56146167e518f5ecbad8fab1926aa8a2fd2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:06:27 +0000 
Subject: [PATCH 154/174] style: pep8 to system module --- .../wazuh_testing/tools/system.py | 22 +++++++++---------- 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index a031e84e33..67144b5106 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -41,7 +41,6 @@ def __init__(self, inventory_path: str): except (OSError, yaml.YAMLError) as inventory_err: raise ValueError(f"Could not open/load Ansible inventory '{self.inventory_path}': {inventory_err}") - data_loader = DataLoader() self.inventory_manager = InventoryManager(loader=data_loader, sources=inventory_path) self.hosts_variables = {} @@ -51,7 +50,6 @@ def __init__(self, inventory_path: str): for host in self.inventory_manager.get_hosts(): self.hosts_variables[host] = variable_manager.get_vars(host=self.inventory_manager.get_host(str(host))) - def get_inventory(self) -> dict: """Get the loaded Ansible inventory. @@ -88,7 +86,6 @@ def get_group_hosts(self, pattern='None'): else: return [str(host) for host in self.inventory_manager.get_hosts()] - def get_host_groups(self, host): """Get the list of groups to which the specified host belongs. @@ -142,7 +139,6 @@ def truncate_file(self, host: str, filepath: str): return result - def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/ossec.conf', check: bool = False): """Move from src_path to the desired location dest_path for the specified host. 
@@ -155,10 +151,12 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o result = None if self.get_host_variables(host)['os_name'] == 'windows': - result = self.get_host(host).ansible("ansible.windows.win_copy", f"src='{src_path}' dest='{dest_path}'", check=check) + result = self.get_host(host).ansible("ansible.windows.win_copy", f"src='{src_path}' dest='{dest_path}'", + check=check) else: - result = self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=preserve", - check=check) + result = self.get_host(host).ansible('copy', f'src={src_path} dest={dest_path}' + 'owner=wazuh group=wazuh mode=preserve', + check=check) return result @@ -178,7 +176,7 @@ def add_block_to_file(self, host: str, path: str, replace: str, before: str, aft self.get_host(host).ansible("replace", fr"path={path} regexp='{after}[\s\S]+{before}' replace='{replace}'", check=check) - def modify_file_content(self, host: str, path: str = None, content: Union[str, bytes] = ''): + def modify_file_content(self, host: str, path: str = '', content: Union[str, bytes] = ''): """Create a file with a specified content and copies it to a path. 
Args: @@ -491,7 +489,8 @@ def install_package(self, host, url, system='ubuntu'): if result['changed'] and result['stderr'] == '': result = True elif system == 'centos': - result = self.get_host(host).ansible("yum", f"name={url} state=present sslverify=false disable_gpg_check=True", check=False) + result = self.get_host(host).ansible("yum", f"name={url} state=present" + 'sslverify=false disable_gpg_check=True', check=False) if 'rc' in result and result['rc'] == 0 and result['changed']: result = True elif system == 'macos': @@ -516,7 +515,7 @@ def get_master_ip(self): for manager in self.get_group_hosts('manager'): if 'type' in self.get_host_variables(manager) and \ - self.get_host_variables(manager)['type'] == 'master': + self.get_host_variables(manager)['type'] == 'master': master_ip = self.get_host_variables(manager)['ip'] break @@ -624,8 +623,6 @@ def run_playbook(self, host, playbook_name, params=None): return r - - def handle_wazuh_services(self, host, operation): """ Handles Wazuh services on the specified host. 
@@ -711,6 +708,7 @@ def remove_agents(self): endpoint=f'/agents?agents_list={",".join(agents_ids)}&status=all&older_than=0s', token=token, ) + logging.info(f"Agents removed result {result}") def get_hosts_not_reachable(self) -> List[str]: """ From a6a5a508d0c2cb10e5339196b0f251aad8514c7f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:06:41 +0000 Subject: [PATCH 155/174] fix: hardcoded host to remove vlc playbook --- .../wazuh_testing/tools/playbooks/remove_vlc_win.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_vlc_win.yaml b/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_vlc_win.yaml index 562d5303ca..8f2b593ac8 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_vlc_win.yaml +++ b/deps/wazuh_testing/wazuh_testing/tools/playbooks/remove_vlc_win.yaml @@ -1,5 +1,5 @@ - name: Uninstall VLC - hosts: agent2 + hosts: HOSTS become_method: runas tasks: - name: Run VLC uninstall From e2b6f32c24141623d7b5e5b6f5887f5b09ec0ccf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:07:11 +0000 Subject: [PATCH 156/174] style: macos tasks name consistency --- provisioning/roles/apache/tasks/{macOS.yaml => macos.yaml} | 0 .../roles/wazuh/ansible-filebeat-oss/defaults/main.yml | 4 ++-- .../roles/wazuh/ansible-wazuh-agent/handlers/main.yml | 4 ++-- .../wazuh/ansible-wazuh-agent/tasks/{MacOS.yml => macos.yml} | 0 provisioning/roles/wazuh/ansible-wazuh-agent/tasks/main.yml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) rename provisioning/roles/apache/tasks/{macOS.yaml => macos.yaml} (100%) rename provisioning/roles/wazuh/ansible-wazuh-agent/tasks/{MacOS.yml => macos.yml} (100%) diff --git a/provisioning/roles/apache/tasks/macOS.yaml b/provisioning/roles/apache/tasks/macos.yaml similarity index 100% rename from provisioning/roles/apache/tasks/macOS.yaml rename to 
provisioning/roles/apache/tasks/macos.yaml diff --git a/provisioning/roles/wazuh/ansible-filebeat-oss/defaults/main.yml b/provisioning/roles/wazuh/ansible-filebeat-oss/defaults/main.yml index b3332cf924..4af68ae478 100644 --- a/provisioning/roles/wazuh/ansible-filebeat-oss/defaults/main.yml +++ b/provisioning/roles/wazuh/ansible-filebeat-oss/defaults/main.yml @@ -9,7 +9,7 @@ filebeat_output_indexer_hosts: - "localhost:9200" filebeat_module_package_url: https://packages.wazuh.com/4.x/filebeat -filebeat_module_package_name: wazuh-filebeat-0.3.tar.gz +filebeat_module_package_name: wazuh-filebeat-0.4.tar.gz filebeat_module_package_path: /tmp/ filebeat_module_destination: /usr/share/filebeat/module filebeat_module_folder: /usr/share/filebeat/module/wazuh @@ -26,4 +26,4 @@ filebeatrepo: apt: 'deb https://packages.wazuh.com/4.x/apt/ stable main' yum: 'https://packages.wazuh.com/4.x/yum/' gpg: 'https://packages.wazuh.com/key/GPG-KEY-WAZUH' - key_id: '0DCFCA5547B19D2A6099506096B3EE5F29111145' \ No newline at end of file + key_id: '0DCFCA5547B19D2A6099506096B3EE5F29111145' diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml index 4285e2bc3c..63e5c5b32b 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml @@ -5,5 +5,5 @@ - name: Windows | Restart Wazuh Agent win_service: name=WazuhSvc start_mode=auto state=restarted -- name: MacOS | restart wazuh-agent - ansible.builtin.shell: "{{ macos_wazuh_dir }}/bin/wazuh-control restart" +- name: MacOS | Restart Wazuh Agent + shell: "{{ macos_wazuh_dir }}/bin/wazuh-control restart" diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/macos.yml similarity index 100% rename from provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml rename to 
provisioning/roles/wazuh/ansible-wazuh-agent/tasks/macos.yml diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/main.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/main.yml index 6eddb1ce65..486629acf4 100644 --- a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/main.yml +++ b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/main.yml @@ -24,7 +24,7 @@ - include_tasks: "Linux.yml" when: ansible_system == "Linux" -- include_tasks: "MacOS.yml" +- include_tasks: "macos.yml" when: ansible_system == "Darwin" - include_tasks: "Solaris.yml" From 262994a07d548ba4e7d2e897edc0dccb2ab506f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:08:06 +0000 Subject: [PATCH 157/174] fix: duplicated agent/manager configuration --- .../configurations/agent.yaml | 2 +- .../configurations/manager.yaml | 21 +++---------------- 2 files changed, 4 insertions(+), 19 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml index 6bcc2eb290..17626543de 100644 --- a/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/agent.yaml @@ -10,7 +10,7 @@ - section: syscheck elements: - disabled: - value: 'no' + value: 'yes' - section: wodle attributes: - name: syscollector diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml index e62289be13..efe1d4953c 100644 --- a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml @@ -33,32 +33,17 @@ elements: - enabled: value: 'no' + - section: rootcheck elements: - disabled: value: 'yes' + - section: syscheck elements: - disabled: value: 'yes' - - section: wodle - attributes: - - name: 
syscollector - elements: - - disabled: - value: 'no' - - section: sca - elements: - - enabled: - value: 'no' - - section: rootcheck - elements: - - disabled: - value: 'yes' - - section: syscheck - elements: - - disabled: - value: 'yes' + - section: wodle attributes: - name: syscollector From 156d4a7d8afce6543f9d54b327e816f03c9b69e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:08:17 +0000 Subject: [PATCH 158/174] refac: remove unnused teardown --- .../cases/test_vulnerability.yaml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 7c805ede91..77a755ea68 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -145,7 +145,7 @@ macos: amd64: node-v17.1.0 arm64v8: node-v17.1.0 - teardown: null + - case: 'Upgrade: New vulnerability ' id: upgrade_package_maintain_add_vulnerability description: | @@ -195,7 +195,7 @@ macos: amd64: node-v18.11.0 arm64v8: node-v18.11.0 - teardown: null + - case: 'Upgrade: Maintain and new vulnerability ' id: upgrade_package_maintain_add_vulnerability description: > @@ -247,7 +247,7 @@ macos: amd64: node-v18.12.0 arm64v8: node-v18.12.0 - teardown: null + - case: 'Upgrade: Cease vulnerability' id: upgrade_package_remove_vulnerability description: | @@ -297,7 +297,7 @@ macos: amd64: node-v19.5.0 arm64v8: node-v19.5.0 - teardown: null + - case: 'Upgrade: Non vulnerable to non vulnerable' id: upgrade_package_nonvulnerable_to_nonvulnerable description: | @@ -356,7 +356,7 @@ macos: amd64: node-v19.6.0 arm64v8: node-v19.6.0 - teardown: null + - case: 'Upgrade: Non vulnerable to vulnerable package' id: upgrade_package_nonvulnerable_to_vulnerable description: | @@ -404,7 +404,7 @@ macos: amd64: 
node-v20.0.0 arm64v8: node-v20.0.0 - teardown: null + - case: Installation of a non vulnerable package id: install_package_non_vulnerable description: | @@ -476,5 +476,5 @@ macos: amd64: node-v19.6.0 arm64v8: node-v19.6.0 - teardown: null + From 1c25b5c6ddeab3e144d38008a83de017e879a130 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:09:14 +0000 Subject: [PATCH 159/174] fix: style errors and remove logs fixture --- tests/end_to_end/conftest.py | 2 +- tests/end_to_end/test_vulnerability_detector/conftest.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/end_to_end/conftest.py b/tests/end_to_end/conftest.py index d9dd577694..d86dcfef28 100644 --- a/tests/end_to_end/conftest.py +++ b/tests/end_to_end/conftest.py @@ -155,7 +155,7 @@ def validate_environments(request): # -------------------------------------------------- End of Step 4 ------------------------------------------------- -@pytest.fixture(scope='module', autouse=False) +@pytest.fixture(scope='module') def run_specific_validations(request): """Fixture with module scope to validate the environment of an specific tests with specific validation tasks. diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index a72201d900..1fe00defd7 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -1,5 +1,5 @@ """ -Pytest Configuration for Wazuh Vulnerability Detector End-to-End Testing +Pytest configuration for Vulnerability Detector E2E testing This module contains Pytest fixtures and configuration settings for Wazuh end-to-end testing. It provides reusable fixtures and setup that can be shared across multiple test modules. 
@@ -78,8 +78,8 @@ def collect_evidences(test_name, evidences) -> None: Collect specific evidences for the test Args: - request: Pytest request object - results: An instance of the SyscollectorScansTestsResults class containing the results of the tests + test_name: Name of the test + evidences: Dictionary with the evidences to collect """ current_dir = os.path.dirname(__file__) vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") @@ -212,6 +212,8 @@ def pytest_runtest_setup(item): yield + shutil.rmtree(logs_test_dir, ignore_errors=True) + def pytest_html_results_table_header(cells): cells.insert(4, html.th('Tier', class_='sortable tier', col='tier')) From 59fc28eeea20d0a3e307d7f9d8a5e5503af1bc48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:24:30 +0000 Subject: [PATCH 160/174] docs: remove outdated commentary --- .../test_vulnerability_detector/test_vulnerability_detector.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 030dd1cb1f..59bf9e9b96 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -231,7 +231,6 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) @pytest.mark.filterwarnings('ignore::urllib3.exceptions.InsecureRequestWarning') class TestInitialScans(): - # results = SyscollectorScansTestsResults() results = {} @pytest.fixture(scope='class') From e7203dff71053d556bd32221fc6150d0cd3d4808 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:29:22 +0000 Subject: [PATCH 161/174] style: fix capitalization of logging messages --- .../test_vulnerability_detector/test_vulnerability_detector.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 59bf9e9b96..4304d9721e 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -277,7 +277,7 @@ def test_syscollector_first_scan(self, request, host_manager, setup_vulnerabilit } } - logger.critical("Monitoring Syscollector First Scan") + logger.critical("Monitoring syscollector first scan") list_hosts = host_manager.get_group_hosts('agent') monitoring_data = generate_monitoring_logs(host_manager, [get_event_regex({'event': 'syscollector_scan_start'}), From 0f983ae3a3299757d57be27e7be339696192c16f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 12:32:48 +0000 Subject: [PATCH 162/174] fix: remove critical logging from monitoring --- deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py index c4bee14b5f..915ac8e112 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/monitoring.py @@ -86,7 +86,6 @@ def filter_events_by_timestamp(match_events: List) -> List: List: A list of events that fit the timestamp. 
""" match_that_fit_timestamp = [] - logging.critical(f"match_events: {match_events}") for match in match_events: if match.__class__ == tuple: timestamp_str = match[0] @@ -99,7 +98,6 @@ def filter_events_by_timestamp(match_events: List) -> List: timestamp_datetime = datetime.strptime(timestamp_str, timestamp_format) greater_than_timestamp_formatted = datetime.strptime(greater_than_timestamp, timestamp_format_parameter) - logging.critical(f"Comparing {timestamp_datetime} {greater_than_timestamp_formatted} ") if timestamp_datetime >= greater_than_timestamp_formatted: match_that_fit_timestamp.append(match) From dd9a70ada0903a304cb6710c83927c1849b836e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 14:04:46 +0000 Subject: [PATCH 163/174] fix: report generation E2E VD --- .../test_vulnerability_detector/conftest.py | 25 ++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index 1fe00defd7..3142d4a967 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -46,6 +46,7 @@ def test_example(host_manager): STYLE_PATH = os.path.join(os.path.dirname(__file__), '../../../deps/wazuh_testing/wazuh_testing/reporting/style.css') +gather_evidences_when_passed = False catalog = list() results = dict() @@ -212,6 +213,19 @@ def pytest_runtest_setup(item): yield + +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_runtest_teardown(item, nextitem): + # Current dir and logs + + yield + + item_name = item._request.node.name + item_path = item._request.node.path + + logs_dir = os.path.join(os.path.dirname(item_path), 'logs') + logs_test_dir = os.path.join(logs_dir, item_name) + shutil.rmtree(logs_test_dir, ignore_errors=True) @@ -275,7 +289,7 @@ def pytest_runtest_makereport(item, call): extra = 
getattr(report, 'extra', []) - if report.when == 'teardown': + if report.when == 'call': # Apply hack to fix length filename problem pytest_html.HTMLReport.TestResult.create_asset = create_asset @@ -299,6 +313,7 @@ def pytest_runtest_makereport(item, call): extra.append(pytest_html.extras.json(arguments, name="Test arguments")) try: + current_dir = os.path.dirname(__file__) vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") logs_path = os.path.join(str(vulnerability_detector_logs_dir), item._request.node.name) @@ -330,8 +345,12 @@ def pytest_runtest_makereport(item, call): except Exception as e: logging.critical(f"Error collecting evidences: {e} for {item._request.node.name}") - # if not report.passed and not report.skipped: - report.extra = extra + if gather_evidences_when_passed and not report.skipped: + report.extra = extra + else: + if not report.passed and not report.skipped: + report.extra = extra + if report.longrepr is not None and report.longreprtext.split()[-1] == 'XFailed': results[report.location[0]]['xfailed'] += 1 From f6f9b13eef363770dfece5799fe9a119f44e3dbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 14:42:56 +0000 Subject: [PATCH 164/174] fix: setup stages for only one OS --- .../end_to_end/remote_operations_handler.py | 119 ++++++++++-------- .../vuln_packages.json | 3 +- 2 files changed, 67 insertions(+), 55 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 3965764a31..6e4ea7071a 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -154,36 +154,39 @@ def install_package(host: str, operation_data: Dict[str, Dict], host_manager: Ho else: raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") - package_data = 
load_packages_metadata()[package_id] - package_url = package_data['urls'][host_os_name][host_os_arch] + package_data = load_packages_metadata()[package_id] + package_url = package_data['urls'][host_os_name][host_os_arch] - logging.info(f"Installing package on {host}") - logging.info(f"Package URL: {package_url}") + logging.info(f"Installing package on {host}") + logging.info(f"Package URL: {package_url}") - current_datetime = datetime.utcnow().isoformat() + current_datetime = datetime.utcnow().isoformat() - host_manager.install_package(host, package_url, system) + host_manager.install_package(host, package_url, system) - logging.info(f"Package {package_url} installed on {host}") + logging.info(f"Package {package_url} installed on {host}") - logging.info(f"Package installed on {host}") + logging.info(f"Package installed on {host}") - results['checks']['all_successfull'] = True + results['checks']['all_successfull'] = True - wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or - operation_data['check']['state_index'] or - operation_data['check']['no_alerts'] or - operation_data['check']['no_indices']) + wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or + operation_data['check']['state_index'] or + operation_data['check']['no_alerts'] or + operation_data['check']['no_indices']) - if wait_is_required: - wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) + if wait_is_required: + wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) - check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, - package_data, operation='install') + check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, + package_data, operation='install') + else: + logging.info(f"No operation to perform on {host}") + results['checks']['all_successfull'] = True return { f"{host}": results 
- } + } def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): @@ -225,27 +228,32 @@ def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos else: raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") - package_data = load_packages_metadata()[package_id] + package_data = load_packages_metadata()[package_id] - current_datetime = datetime.utcnow().isoformat() + current_datetime = datetime.utcnow().isoformat() - logging.info(f"Removing package on {host}") - if 'uninstall_name' in package_data: - uninstall_name = package_data['uninstall_name'] - host_manager.remove_package(host, system, package_uninstall_name=uninstall_name) - elif 'uninstall_custom_playbook' in package_data: - host_manager.remove_package(host, system, custom_uninstall_playbook=package_data['uninstall_custom_playbook']) + logging.info(f"Removing package on {host}") + if 'uninstall_name' in package_data: + uninstall_name = package_data['uninstall_name'] + host_manager.remove_package(host, system, package_uninstall_name=uninstall_name) + elif 'uninstall_custom_playbook' in package_data: + host_manager.remove_package(host, system, + custom_uninstall_playbook=package_data['uninstall_custom_playbook']) + + wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or + operation_data['check']['state_index'] or + operation_data['check']['no_alerts'] or + operation_data['check']['no_indices']) - wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or - operation_data['check']['state_index'] or - operation_data['check']['no_alerts'] or - operation_data['check']['no_indices']) + if wait_is_required: + wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) - if wait_is_required: - wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) + check_vulnerability_alerts(results, operation_data['check'], current_datetime, 
host_manager, host, + package_data, operation='remove') - check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, - package_data, operation='remove') + else: + logging.info(f"No operation to perform on {host}") + results['checks']['all_successfull'] = True return { f"{host}": results @@ -301,34 +309,37 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos else: raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") - package_data_from = load_packages_metadata()[package_id_from] - package_data_to = load_packages_metadata()[package_id_to] + package_data_from = load_packages_metadata()[package_id_from] + package_data_to = load_packages_metadata()[package_id_to] + package_url_to = package_data_to['urls'][host_os_name][host_os_arch] - package_url_to = package_data_to['urls'][host_os_name][host_os_arch] + logging.info(f"Installing package on {host}") + logging.info(f"Package URL: {package_url_to}") - logging.info(f"Installing package on {host}") - logging.info(f"Package URL: {package_url_to}") + current_datetime = datetime.utcnow().isoformat() + host_manager.install_package(host, package_url_to, system) - current_datetime = datetime.utcnow().isoformat() - host_manager.install_package(host, package_url_to, system) + logging.info(f"Package {package_url_to} installed on {host}") - logging.info(f"Package {package_url_to} installed on {host}") + logging.info(f"Package installed on {host}") - logging.info(f"Package installed on {host}") + wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or + operation_data['check']['state_index'] or + operation_data['check']['no_alerts'] or + operation_data['check']['no_indices']) + if wait_is_required: + wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) - wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or - 
operation_data['check']['state_index'] or - operation_data['check']['no_alerts'] or - operation_data['check']['no_indices']) - if wait_is_required: - wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) + check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, + {'from': package_data_from, 'to': package_data_to}, operation='update') + else: + logging.info(f"No operation to perform on {host}") + results['checks']['all_successfull'] = True - check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, - {'from': package_data_from, 'to': package_data_to} , operation='update') - return { - f"{host}": results - } + return { + f"{host}": results + } def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager): diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json index 97123ebf51..f92d32d1a3 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json @@ -417,7 +417,8 @@ "macos": { "amd64": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg", "arm64v8": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg" - } + }, + "windows": "https://nodejs.org/dist/v19.5.0/win-x86/node.exe" }, "uninstall_name": "node*" }, From 1313024b4afe868abb3e01726784b4cb6879ca39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 14:54:15 +0000 Subject: [PATCH 165/174] fix: node vuln package in windows structure MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Víctor Rebollo Pérez --- .../vulnerability_detector_packages/vuln_packages.json | 8 
++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json index f92d32d1a3..00ea6da130 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json @@ -418,7 +418,9 @@ "amd64": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg", "arm64v8": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg" }, - "windows": "https://nodejs.org/dist/v19.5.0/win-x86/node.exe" + "windows": { + "amd64": "https://nodejs.org/dist/v19.5.0/win-x86/node.exe" + } }, "uninstall_name": "node*" }, @@ -431,7 +433,9 @@ "amd64": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg", "arm64v8": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg" }, - "windows": "https://nodejs.org/dist/v19.6.0/win-x86/node.exe" + "windows": { + "amd64": "https://nodejs.org/dist/v19.6.0/win-x86/node.exe" + } }, "uninstall_name": "node*" }, From 34d967ab7f4b55991578d27553ef1720b5dcd2e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Fri, 2 Feb 2024 15:22:20 +0000 Subject: [PATCH 166/174] fix: update nonvuln package case --- .../end_to_end/remote_operations_handler.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 6e4ea7071a..80d13cb4b3 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -133,7 +133,9 @@ def install_package(host: str, operation_data: Dict[str, Dict], host_manager: Ho "alerts_found_unexpected": [], "states_found_unexpected": 
[] }, - 'checks': {} + 'checks': { + 'all_successfull': True + } } logging.info(f"Installing package on {host}") @@ -182,7 +184,6 @@ def install_package(host: str, operation_data: Dict[str, Dict], host_manager: Ho package_data, operation='install') else: logging.info(f"No operation to perform on {host}") - results['checks']['all_successfull'] = True return { f"{host}": results @@ -211,7 +212,9 @@ def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos "alerts_found_unexpected": [], "states_found_unexpected": [] }, - 'checks': {} + 'checks': { + 'all_successfull': True + } } host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(host)['architecture'] @@ -253,7 +256,6 @@ def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos else: logging.info(f"No operation to perform on {host}") - results['checks']['all_successfull'] = True return { f"{host}": results @@ -282,7 +284,10 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos "alerts_found_unexpected": [], "states_found_unexpected": [] }, - 'checks': {} + 'checks': { + 'all_successfull': True + } + } host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] @@ -335,7 +340,6 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos {'from': package_data_from, 'to': package_data_to}, operation='update') else: logging.info(f"No operation to perform on {host}") - results['checks']['all_successfull'] = True return { f"{host}": results From ea56468933c3e870e07fa62d68a79230b42915a8 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 5 Feb 2024 08:52:49 +0100 Subject: [PATCH 167/174] docs: update changelog --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e837a350b0..c5052ae94f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ All notable changes to this project will 
be documented in this file. ### Added +- Add Vulnerability Detector end to end tests ([#4878](https://github.com/wazuh/wazuh-qa/pull/4878)) \- (Framework + Tests) - Agent Simulator: Syscollector message generation refactoring ([#4868](https://github.com/wazuh/wazuh-qa/pull/4868)) \- (Framework) - Migrate Wazuh Ansibles Roles. ([#4642](https://github.com/wazuh/wazuh-qa/pull/4642)) \- (Framework) - Add scans environment setup documentation. ([#4444](https://github.com/wazuh/wazuh-qa/pull/4444)) \- (Tests) @@ -42,7 +43,7 @@ All notable changes to this project will be documented in this file. - Fix duplicated jq dependency ([#4678](https://github.com/wazuh/wazuh-qa/pull/4678)) \- (Framework) - Fix test_file_checker in check_mtime case ([#4873](https://github.com/wazuh/wazuh-qa/pull/4873)) \- (Tests) -## [4.7.2] - TBD +## [4.7.2] - 10/01/2024 ### Fixed From f6c7b8c1a310820225c8f4a6a3c24876839fbb7a Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 5 Feb 2024 09:00:45 +0100 Subject: [PATCH 168/174] refactor: bump revision --- version.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.json b/version.json index 07cbaefcfc..bcf2a1709a 100644 --- a/version.json +++ b/version.json @@ -1,4 +1,4 @@ { "version": "4.8.0", - "revision": "40802" + "revision": "40803" } From e6632c9a2ef09fd0a7bf2e2c56439e06d0118db1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Mon, 5 Feb 2024 17:45:02 +0000 Subject: [PATCH 169/174] Fix service tests to handle Python version-dependent issues by optimizing Ansible runner import strategy (#4916) * fix: avoid ansible_runner import for python less than 3.7 * feat: remove debug logging in run playbook method * feat: avoid importing ansible_runner in case of windows hosts * feat: include windows condition in value error * docs: include 4916 changelog --- CHANGELOG.md | 1 + .../wazuh_testing/tools/system.py | 73 ++++++++++++------- 2 files changed, 46 insertions(+), 28 deletions(-) diff --git 
a/CHANGELOG.md b/CHANGELOG.md index c5052ae94f..317e4d0efc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,6 +37,7 @@ All notable changes to this project will be documented in this file. ### Fixed +- Fixed ansible_runner import conditional to avoid errors on Windows and python 3.6 ([#4916](https://github.com/wazuh/wazuh-qa/pull/4916)) \- (Framework) - Fixed IT control_service Windows loop ([#4765](https://github.com/wazuh/wazuh-qa/pull/4765)) \- (Framework) - Fix macOS agents provision to enable registration and connection with managers. ([#4770](https://github.com/wazuh/wazuh-qa/pull/4770/)) \- (Framework) - Fix hardcoded python interpreter in qa_framework role. ([#4658](https://github.com/wazuh/wazuh-qa/pull/4658)) \- (Framework) diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 67144b5106..9105d43f09 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -4,13 +4,13 @@ import json import tempfile +import sys import os import logging import xml.dom.minidom as minidom from typing import Union, List import testinfra import yaml -import ansible_runner from wazuh_testing.tools import WAZUH_CONF, WAZUH_API_CONF, API_LOG_FILE_PATH, WAZUH_LOCAL_INTERNAL_OPTIONS from wazuh_testing.tools.configuration import set_section_wazuh_conf @@ -590,38 +590,55 @@ def remove_package(self, host, system, package_uninstall_name=None, custom_unins return remove_operation_result def run_playbook(self, host, playbook_name, params=None): - file_dir = os.path.dirname(os.path.realpath(__file__)) - playbook_path = f"{file_dir}/playbooks/{playbook_name}.yaml" - new_playbook = None - new_playbook_path = None + """ + Executes an Ansible playbook on the specified host. + + Args: + host (str): The target host on which to execute the playbook. + playbook_name (str): The name of the playbook to be executed. 
+ params (dict, optional): The parameters to be passed to the playbook. Defaults to None. - with open(playbook_path, 'r') as playbook_file: - playbook = playbook_file.read() - new_playbook = playbook.replace('HOSTS', host) + Returns: + Runner: The result of the playbook execution. - temp_dir = tempfile.mkdtemp() - new_playbook_path = f"{temp_dir}/playbook.yaml" + Raises: + ValueError: If the Python version is less than 3.7. + """ - with open(f"{temp_dir}/playbook.yaml", 'w') as playbook_file: - playbook_file.write(new_playbook) + result = None - r = None + if sys.version_info < (3, 7) or sys.platform.startswith("win"): + raise ValueError("Python 3.7 or higher and a Unix-like system are required to run Ansible playbooks.") + else: + import ansible_runner - logger.setLevel(logging.DEBUG) - try: - r = ansible_runner.run( - inventory=self.inventory_path, - playbook=new_playbook_path, - host_pattern=host, - extravars=params, - ) - print("Ansible playbook executed successfully.") - except Exception as e: - print(f"Error executing Ansible playbook: {e}") - - logger.setLevel(logging.CRITICAL) - - return r + file_dir = os.path.dirname(os.path.realpath(__file__)) + playbook_path = f"{file_dir}/playbooks/{playbook_name}.yaml" + new_playbook = None + new_playbook_path = None + + with open(playbook_path, 'r') as playbook_file: + playbook = playbook_file.read() + new_playbook = playbook.replace('HOSTS', host) + + temp_dir = tempfile.mkdtemp() + new_playbook_path = f"{temp_dir}/playbook.yaml" + + with open(f"{temp_dir}/playbook.yaml", 'w') as playbook_file: + playbook_file.write(new_playbook) + + try: + result = ansible_runner.run( + inventory=self.inventory_path, + playbook=new_playbook_path, + host_pattern=host, + extravars=params, + ) + logging.info("Ansible playbook executed successfully.") + except Exception as e: + logging.critical(f"Error executing Ansible playbook: {e}") + + return result def handle_wazuh_services(self, host, operation): """ From 
7f4bddc70b61a66faeab6343457ecf32d5934f65 Mon Sep 17 00:00:00 2001 From: jnasselle Date: Mon, 5 Feb 2024 17:49:59 -0300 Subject: [PATCH 170/174] Add Ansible for Windows at requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 5215be5bc4..4ec55a61aa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -39,7 +39,7 @@ numpydoc>=1.1.0 ansible-runner>=2.0.1 ; platform_system == "Linux" docker>=5.0.0 ; platform_system == "Linux" or platform_system=='Windows' python-vagrant>=0.5.15 ; platform_system == "Linux" or platform_system=='Windows' -ansible>=3.1.0 ; platform_system == "Linux" +ansible>=3.1.0 ; platform_system == "Linux" or platform_system=='Windows' elasticsearch>=7.14.1 ; platform_system == "Linux" or platform_system=='Windows' safety==1.10.3 bandit==1.7.0 From b48fc7d6c13ccb5b43835cd4f8c2f02af3eab956 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 6 Feb 2024 08:56:02 +0000 Subject: [PATCH 171/174] refac: move HostMonitor to a new module --- .../wazuh_testing/tools/monitoring.py | 186 +---------------- .../wazuh_testing/tools/system_monitoring.py | 190 ++++++++++++++++++ requirements.txt | 2 +- .../test_active_response_log_format.py | 2 +- .../system/test_agent_auth/test_agent_auth.py | 2 +- .../test_agent_enrollment.py | 2 +- .../test_agent_files_deletion.py | 2 +- .../test_agent_groups/test_agent_groups.py | 2 +- ...test_assign_agent_group_with_enrollment.py | 2 +- .../test_assign_groups_guess.py | 2 +- .../test_agent_groups/test_remove_group.py | 2 +- .../test_agent_info_sync.py | 2 +- .../test_agent_key_polling.py | 2 +- .../test_correct_merged_file_generation.py | 2 +- .../test_integrity_sync.py | 2 +- .../test_ruleset_sync_status.py | 2 +- .../system/test_enrollment/test_enrollment.py | 2 +- .../test_fim/test_files/test_files_cud.py | 2 +- .../test_synchronization.py | 2 +- 19 files changed, 209 insertions(+), 201 
deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/tools/system_monitoring.py diff --git a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py index 87e0472c3f..62659ed503 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py @@ -20,18 +20,13 @@ import sys import threading import time -import yaml -from shutil import copyfile -from collections import defaultdict from copy import copy from datetime import datetime -from multiprocessing import Process, Manager from struct import pack, unpack -from lockfile import FileLock + from wazuh_testing import logger -from wazuh_testing.tools.file import truncate_file -from wazuh_testing.tools.system import HostManager + REMOTED_DETECTOR_PREFIX = r'.*wazuh-remoted.*' LOG_COLLECTOR_DETECTOR_PREFIX = r'.*wazuh-logcollector.*' @@ -839,24 +834,6 @@ def put_queue(self, item): self._queue.put(item) -def new_process(fn): - """Wrapper for enable multiprocessing inside a class - - Args: - fn (callable): Function to be executed in a new thread - - Returns: - wrapper - """ - - def wrapper(*args, **kwargs): - thread = Process(target=fn, args=args, kwargs=kwargs) - thread.start() - return thread - - return wrapper - - def generate_monitoring_callback(regex): """ Generates a new callback that searches for a specific pattern on a line passed. @@ -874,165 +851,6 @@ def new_callback(line): return new_callback -class HostMonitor: - """This class has the capability to monitor remote host. This monitoring consists of reading the specified files to - check that the expected message arrives to them. - - If the goals are achieved, no exceptions will be raised and therefore the test will end properly and without - failures. - - In contrast, if one or more of the goals is not covered, a timeout exception will be raised with a generic or a - custom error message. 
- """ - - def __init__(self, inventory_path, messages_path, tmp_path, time_step=0.5): - """Create a new instance to monitor any given file in any specified host. - - Args: - inventory_path (str): Path to the hosts's inventory file. - messages_path (str): Path to the file where the callbacks, paths and hosts to be monitored are specified. - tmp_path (str): Path to the temporal files. - time_step (float, optional): Fraction of time to wait in every get. Defaults to `0.5` - """ - self.host_manager = HostManager(inventory_path=inventory_path) - self._queue = Manager().Queue() - self._result = defaultdict(list) - self._time_step = time_step - self._file_monitors = list() - self._file_content_collectors = list() - self._tmp_path = tmp_path - try: - os.mkdir(self._tmp_path) - except OSError: - pass - with open(messages_path, 'r') as f: - self.test_cases = yaml.safe_load(f) - - def run(self, update_position=False): - """This method creates and destroy the needed processes for the messages founded in messages_path. - It creates one file composer (process) for every file to be monitored in every host.""" - for host, payload in self.test_cases.items(): - monitored_files = {case['path'] for case in payload} - if len(monitored_files) == 0: - raise AttributeError('There is no path to monitor. 
Exiting...') - for path in monitored_files: - output_path = f'{host}_{path.split("/")[-1]}.tmp' - self._file_content_collectors.append(self.file_composer(host=host, path=path, output_path=output_path)) - logger.debug(f'Add new file composer process for {host} and path: {path}') - self._file_monitors.append(self._start(host=host, - payload=[block for block in payload if block["path"] == path], - path=output_path)) - logger.debug(f'Add new file monitor process for {host} and path: {path}') - - while True: - if not any([handler.is_alive() for handler in self._file_monitors]): - for handler in self._file_monitors: - handler.join() - for file_collector in self._file_content_collectors: - file_collector.terminate() - file_collector.join() - self.clean_tmp_files() - break - time.sleep(self._time_step) - self.check_result() - return self.result() - - @new_process - def file_composer(self, host, path, output_path): - """Collects the file content of the specified path in the desired host and append it to the output_path file. - Simulates the behavior of tail -f and redirect the output to output_path. - - Args: - host (str): Hostname. - path (str): Host file path to be collect. - output_path (str): Output path of the content collected from the remote host path. - """ - try: - truncate_file(os.path.join(self._tmp_path, output_path)) - except FileNotFoundError: - pass - logger.debug(f'Starting file composer for {host} and path: {path}. 
' - f'Composite file in {os.path.join(self._tmp_path, output_path)}') - tmp_file = os.path.join(self._tmp_path, output_path) - while True: - with FileLock(tmp_file): - with open(tmp_file, "r+") as file: - content = self.host_manager.get_file_content(host, path).split('\n') - file_content = file.read().split('\n') - for new_line in content: - if new_line == '': - continue - if new_line not in file_content: - file.write(f'{new_line}\n') - time.sleep(self._time_step) - - @new_process - def _start(self, host, payload, path, encoding=None, error_messages_per_host=None, update_position=False): - """Start the file monitoring until the QueueMonitor returns an string or TimeoutError. - - Args: - host (str): Hostname - payload (list,dict): Contains the message to be found and the timeout for it. - path (str): Path where it must search for the message. - encoding (str): Encoding of the file. - error_messages_per_host (dict): Dictionary with hostnames as keys and desired error messages as values - Returns: - Instance of HostMonitor - """ - tailer = FileTailer(os.path.join(self._tmp_path, path), time_step=self._time_step) - try: - if encoding is not None: - tailer.encoding = encoding - tailer.start() - for case in payload: - logger.debug(f'Starting QueueMonitor for {host} and message: {case["regex"]}') - monitor = QueueMonitor(tailer.queue, time_step=self._time_step) - try: - self._queue.put({host: monitor.start(timeout=case['timeout'], - callback=make_callback(pattern=case['regex'], prefix='.*'), - update_position=False - ).result()}) - except TimeoutError: - try: - self._queue.put({host: error_messages_per_host[host]}) - except (KeyError, TypeError): - self._queue.put({ - host: TimeoutError(f'Did not found the expected callback in {host}: {case["regex"]}')}) - logger.debug(f'Finishing QueueMonitor for {host} and message: {case["regex"]}') - finally: - tailer.shutdown() - - return self - - def result(self): - """Get the result of HostMonitor - - Args: - dict (dict): Dict that 
contains the host as the key and a list of messages as the values - """ - return self._result - - def check_result(self): - """Check if a TimeoutError occurred.""" - logger.debug(f'Checking results...') - while not self._queue.empty(): - result = self._queue.get(block=True) - for host, msg in result.items(): - if isinstance(msg, TimeoutError): - raise msg - logger.debug(f'Received from {host} the expected message: {msg}') - self._result[host].append(msg) - - def clean_tmp_files(self): - """Remove tmp files.""" - logger.debug("Cleaning temporal files...") - for file in os.listdir(self._tmp_path): - tmp_file = os.path.join(self._tmp_path, file) - if file.endswith(".log.tmp"): - copyfile(tmp_file, os.path.join("/tmp", os.path.splitext(file)[0])) - os.remove(tmp_file) - - def wait_mtime(path, time_step=5, timeout=-1): """ Wait until the monitored log is not being modified. diff --git a/deps/wazuh_testing/wazuh_testing/tools/system_monitoring.py b/deps/wazuh_testing/wazuh_testing/tools/system_monitoring.py new file mode 100644 index 0000000000..8cba996c1a --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/tools/system_monitoring.py @@ -0,0 +1,190 @@ +import yaml +import os +import time + +from multiprocessing import Process, Manager +from collections import defaultdict +from lockfile import FileLock +from shutil import copyfile + +from wazuh_testing import logger +from wazuh_testing.tools.system import HostManager +from wazuh_testing.tools.file import truncate_file +from wazuh_testing.tools.monitoring import FileTailer, QueueMonitor, make_callback + + +def new_process(fn): + """Wrapper for enable multiprocessing inside a class + + Args: + fn (callable): Function to be executed in a new thread + + Returns: + wrapper + """ + + def wrapper(*args, **kwargs): + thread = Process(target=fn, args=args, kwargs=kwargs) + thread.start() + return thread + + return wrapper + + +class HostMonitor: + """This class has the capability to monitor remote host. 
This monitoring consists of reading the specified files to + check that the expected message arrives to them. + + If the goals are achieved, no exceptions will be raised and therefore the test will end properly and without + failures. + + In contrast, if one or more of the goals is not covered, a timeout exception will be raised with a generic or a + custom error message. + """ + + def __init__(self, inventory_path, messages_path, tmp_path, time_step=0.5): + """Create a new instance to monitor any given file in any specified host. + + Args: + inventory_path (str): Path to the hosts's inventory file. + messages_path (str): Path to the file where the callbacks, paths and hosts to be monitored are specified. + tmp_path (str): Path to the temporal files. + time_step (float, optional): Fraction of time to wait in every get. Defaults to `0.5` + """ + self.host_manager = HostManager(inventory_path=inventory_path) + self._queue = Manager().Queue() + self._result = defaultdict(list) + self._time_step = time_step + self._file_monitors = list() + self._file_content_collectors = list() + self._tmp_path = tmp_path + try: + os.mkdir(self._tmp_path) + except OSError: + pass + with open(messages_path, 'r') as f: + self.test_cases = yaml.safe_load(f) + + def run(self, update_position=False): + """This method creates and destroy the needed processes for the messages founded in messages_path. + It creates one file composer (process) for every file to be monitored in every host.""" + for host, payload in self.test_cases.items(): + monitored_files = {case['path'] for case in payload} + if len(monitored_files) == 0: + raise AttributeError('There is no path to monitor. 
Exiting...') + for path in monitored_files: + output_path = f'{host}_{path.split("/")[-1]}.tmp' + self._file_content_collectors.append(self.file_composer(host=host, path=path, output_path=output_path)) + logger.debug(f'Add new file composer process for {host} and path: {path}') + self._file_monitors.append(self._start(host=host, + payload=[block for block in payload if block["path"] == path], + path=output_path)) + logger.debug(f'Add new file monitor process for {host} and path: {path}') + + while True: + if not any([handler.is_alive() for handler in self._file_monitors]): + for handler in self._file_monitors: + handler.join() + for file_collector in self._file_content_collectors: + file_collector.terminate() + file_collector.join() + self.clean_tmp_files() + break + time.sleep(self._time_step) + self.check_result() + return self.result() + + @new_process + def file_composer(self, host, path, output_path): + """Collects the file content of the specified path in the desired host and append it to the output_path file. + Simulates the behavior of tail -f and redirect the output to output_path. + + Args: + host (str): Hostname. + path (str): Host file path to be collect. + output_path (str): Output path of the content collected from the remote host path. + """ + try: + truncate_file(os.path.join(self._tmp_path, output_path)) + except FileNotFoundError: + pass + logger.debug(f'Starting file composer for {host} and path: {path}. 
' + f'Composite file in {os.path.join(self._tmp_path, output_path)}') + tmp_file = os.path.join(self._tmp_path, output_path) + while True: + with FileLock(tmp_file): + with open(tmp_file, "r+") as file: + content = self.host_manager.get_file_content(host, path).split('\n') + file_content = file.read().split('\n') + for new_line in content: + if new_line == '': + continue + if new_line not in file_content: + file.write(f'{new_line}\n') + time.sleep(self._time_step) + + @new_process + def _start(self, host, payload, path, encoding=None, error_messages_per_host=None, update_position=False): + """Start the file monitoring until the QueueMonitor returns an string or TimeoutError. + + Args: + host (str): Hostname + payload (list,dict): Contains the message to be found and the timeout for it. + path (str): Path where it must search for the message. + encoding (str): Encoding of the file. + error_messages_per_host (dict): Dictionary with hostnames as keys and desired error messages as values + Returns: + Instance of HostMonitor + """ + tailer = FileTailer(os.path.join(self._tmp_path, path), time_step=self._time_step) + try: + if encoding is not None: + tailer.encoding = encoding + tailer.start() + for case in payload: + logger.debug(f'Starting QueueMonitor for {host} and message: {case["regex"]}') + monitor = QueueMonitor(tailer.queue, time_step=self._time_step) + try: + self._queue.put({host: monitor.start(timeout=case['timeout'], + callback=make_callback(pattern=case['regex'], prefix='.*'), + update_position=False + ).result()}) + except TimeoutError: + try: + self._queue.put({host: error_messages_per_host[host]}) + except (KeyError, TypeError): + self._queue.put({ + host: TimeoutError(f'Did not found the expected callback in {host}: {case["regex"]}')}) + logger.debug(f'Finishing QueueMonitor for {host} and message: {case["regex"]}') + finally: + tailer.shutdown() + + return self + + def result(self): + """Get the result of HostMonitor + + Args: + dict (dict): Dict that 
contains the host as the key and a list of messages as the values + """ + return self._result + + def check_result(self): + """Check if a TimeoutError occurred.""" + logger.debug('Checking results...') + while not self._queue.empty(): + result = self._queue.get(block=True) + for host, msg in result.items(): + if isinstance(msg, TimeoutError): + raise msg + logger.debug(f'Received from {host} the expected message: {msg}') + self._result[host].append(msg) + + def clean_tmp_files(self): + """Remove tmp files.""" + logger.debug("Cleaning temporal files...") + for file in os.listdir(self._tmp_path): + tmp_file = os.path.join(self._tmp_path, file) + if file.endswith(".log.tmp"): + copyfile(tmp_file, os.path.join("/tmp", os.path.splitext(file)[0])) + os.remove(tmp_file) diff --git a/requirements.txt b/requirements.txt index 4ec55a61aa..5215be5bc4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -39,7 +39,7 @@ numpydoc>=1.1.0 ansible-runner>=2.0.1 ; platform_system == "Linux" docker>=5.0.0 ; platform_system == "Linux" or platform_system=='Windows' python-vagrant>=0.5.15 ; platform_system == "Linux" or platform_system=='Windows' -ansible>=3.1.0 ; platform_system == "Linux" or platform_system=='Windows' +ansible>=3.1.0 ; platform_system == "Linux" elasticsearch>=7.14.1 ; platform_system == "Linux" or platform_system=='Windows' safety==1.10.3 bandit==1.7.0 diff --git a/tests/system/test_active_response_log_format/test_active_response_log_format.py b/tests/system/test_active_response_log_format/test_active_response_log_format.py index 7f8cd3d4af..cf9d28c4c8 100644 --- a/tests/system/test_active_response_log_format/test_active_response_log_format.py +++ b/tests/system/test_active_response_log_format/test_active_response_log_format.py @@ -45,7 +45,7 @@ import pytest from random import randint from wazuh_testing.tools import WAZUH_LOGS_PATH -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from 
wazuh_testing.tools.system import HostManager, clean_environment diff --git a/tests/system/test_agent_auth/test_agent_auth.py b/tests/system/test_agent_auth/test_agent_auth.py index 173818a23b..d765951270 100644 --- a/tests/system/test_agent_auth/test_agent_auth.py +++ b/tests/system/test_agent_auth/test_agent_auth.py @@ -30,7 +30,7 @@ from wazuh_testing.tools import WAZUH_PATH, WAZUH_LOGS_PATH from wazuh_testing.tools.file import read_yaml -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager from wazuh_testing.tools.utils import format_ipv6_long diff --git a/tests/system/test_cluster/test_agent_enrollment/test_agent_enrollment.py b/tests/system/test_cluster/test_agent_enrollment/test_agent_enrollment.py index 37cc91bd5a..2cb61ca7a5 100644 --- a/tests/system/test_cluster/test_agent_enrollment/test_agent_enrollment.py +++ b/tests/system/test_cluster/test_agent_enrollment/test_agent_enrollment.py @@ -6,7 +6,7 @@ import pytest from wazuh_testing.tools import WAZUH_PATH, WAZUH_LOGS_PATH -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager diff --git a/tests/system/test_cluster/test_agent_files_deletion/test_agent_files_deletion.py b/tests/system/test_cluster/test_agent_files_deletion/test_agent_files_deletion.py index 870fdd5351..b750865237 100644 --- a/tests/system/test_cluster/test_agent_files_deletion/test_agent_files_deletion.py +++ b/tests/system/test_cluster/test_agent_files_deletion/test_agent_files_deletion.py @@ -8,7 +8,7 @@ import pytest from wazuh_testing.tools import WAZUH_PATH, WAZUH_LOGS_PATH -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager diff --git 
a/tests/system/test_cluster/test_agent_groups/test_agent_groups.py b/tests/system/test_cluster/test_agent_groups/test_agent_groups.py index 64c8f8f742..0962eb0331 100644 --- a/tests/system/test_cluster/test_agent_groups/test_agent_groups.py +++ b/tests/system/test_cluster/test_agent_groups/test_agent_groups.py @@ -7,7 +7,7 @@ from time import sleep import pytest -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager from system.test_cluster.test_agent_groups.common import register_agent from system import AGENT_STATUS_ACTIVE, check_agent_status, restart_cluster, execute_wdb_query diff --git a/tests/system/test_cluster/test_agent_groups/test_assign_agent_group_with_enrollment.py b/tests/system/test_cluster/test_agent_groups/test_assign_agent_group_with_enrollment.py index 5531ea9452..d910800a15 100644 --- a/tests/system/test_cluster/test_agent_groups/test_assign_agent_group_with_enrollment.py +++ b/tests/system/test_cluster/test_agent_groups/test_assign_agent_group_with_enrollment.py @@ -47,7 +47,7 @@ from system import (ERR_MSG_CLIENT_KEYS_IN_MASTER_NOT_FOUND, restart_cluster, check_keys_file, delete_agent_group, check_agent_groups_db) -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager from wazuh_testing.tools import WAZUH_PATH diff --git a/tests/system/test_cluster/test_agent_groups/test_assign_groups_guess.py b/tests/system/test_cluster/test_agent_groups/test_assign_groups_guess.py index 6d56111ad8..7946cb6493 100644 --- a/tests/system/test_cluster/test_agent_groups/test_assign_groups_guess.py +++ b/tests/system/test_cluster/test_agent_groups/test_assign_groups_guess.py @@ -50,7 +50,7 @@ assign_agent_to_new_group, restart_cluster) from wazuh_testing.tools.system import HostManager from wazuh_testing.tools.file import 
replace_regex_in_file -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools import WAZUH_PATH, WAZUH_LOCAL_INTERNAL_OPTIONS diff --git a/tests/system/test_cluster/test_agent_groups/test_remove_group.py b/tests/system/test_cluster/test_agent_groups/test_remove_group.py index ad140ded38..8e8810df37 100644 --- a/tests/system/test_cluster/test_agent_groups/test_remove_group.py +++ b/tests/system/test_cluster/test_agent_groups/test_remove_group.py @@ -8,7 +8,7 @@ from wazuh_testing import T_10 from system.test_cluster.test_agent_groups.common import register_agent from wazuh_testing.tools.configuration import get_test_cases_data -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager pytestmark = [pytest.mark.cluster, pytest.mark.enrollment_cluster_env] diff --git a/tests/system/test_cluster/test_agent_info_sync/test_agent_info_sync.py b/tests/system/test_cluster/test_agent_info_sync/test_agent_info_sync.py index 2ed2ec8c73..835d49ddaa 100644 --- a/tests/system/test_cluster/test_agent_info_sync/test_agent_info_sync.py +++ b/tests/system/test_cluster/test_agent_info_sync/test_agent_info_sync.py @@ -9,7 +9,7 @@ import pytest from wazuh_testing.tools import WAZUH_PATH, WAZUH_LOGS_PATH -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager diff --git a/tests/system/test_cluster/test_agent_key_polling/test_agent_key_polling.py b/tests/system/test_cluster/test_agent_key_polling/test_agent_key_polling.py index c3029f80fd..63bde4ce66 100644 --- a/tests/system/test_cluster/test_agent_key_polling/test_agent_key_polling.py +++ b/tests/system/test_cluster/test_agent_key_polling/test_agent_key_polling.py @@ -6,7 +6,7 @@ import pytest from wazuh_testing.tools import 
WAZUH_LOGS_PATH -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager diff --git a/tests/system/test_cluster/test_correct_merged_file_generation/test_correct_merged_file_generation.py b/tests/system/test_cluster/test_correct_merged_file_generation/test_correct_merged_file_generation.py index 94a045b05e..7bd8b73c7d 100644 --- a/tests/system/test_cluster/test_correct_merged_file_generation/test_correct_merged_file_generation.py +++ b/tests/system/test_cluster/test_correct_merged_file_generation/test_correct_merged_file_generation.py @@ -29,7 +29,7 @@ from wazuh_testing import T_1, T_10 from wazuh_testing.tools import WAZUH_PATH from wazuh_testing.tools.file import read_yaml -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager from wazuh_testing.tools.file import replace_regex_in_file from system import (assign_agent_to_new_group, clean_cluster_logs, create_new_agent_group, delete_agent_group, diff --git a/tests/system/test_cluster/test_integrity_sync/test_integrity_sync.py b/tests/system/test_cluster/test_integrity_sync/test_integrity_sync.py index 120a1e6123..e922da5d42 100644 --- a/tests/system/test_cluster/test_integrity_sync/test_integrity_sync.py +++ b/tests/system/test_cluster/test_integrity_sync/test_integrity_sync.py @@ -12,7 +12,7 @@ import yaml from wazuh_testing.tools import WAZUH_PATH, PYTHON_PATH, WAZUH_LOGS_PATH -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager diff --git a/tests/system/test_cluster/test_ruleset_sync_status/test_ruleset_sync_status.py b/tests/system/test_cluster/test_ruleset_sync_status/test_ruleset_sync_status.py index 1d16ce932a..6c42efe1f9 100644 --- 
a/tests/system/test_cluster/test_ruleset_sync_status/test_ruleset_sync_status.py +++ b/tests/system/test_cluster/test_ruleset_sync_status/test_ruleset_sync_status.py @@ -7,7 +7,7 @@ import pytest from wazuh_testing.tools import WAZUH_PATH, WAZUH_LOGS_PATH -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager diff --git a/tests/system/test_enrollment/test_enrollment.py b/tests/system/test_enrollment/test_enrollment.py index ec7b274b62..74be192150 100644 --- a/tests/system/test_enrollment/test_enrollment.py +++ b/tests/system/test_enrollment/test_enrollment.py @@ -34,7 +34,7 @@ from wazuh_testing.tools import WAZUH_PATH, WAZUH_LOGS_PATH from wazuh_testing.tools.file import read_file, read_yaml, write_file -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager from wazuh_testing.tools.utils import format_ipv6_long diff --git a/tests/system/test_fim/test_files/test_files_cud.py b/tests/system/test_fim/test_files/test_files_cud.py index 9a483887b9..07e0051282 100644 --- a/tests/system/test_fim/test_files/test_files_cud.py +++ b/tests/system/test_fim/test_files/test_files_cud.py @@ -43,7 +43,7 @@ import os import pytest -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager, clean_environment from wazuh_testing.tools import WAZUH_LOGS_PATH from wazuh_testing.fim import create_folder_file, wait_for_fim_scan_end diff --git a/tests/system/test_fim/test_synchronization/test_synchronization.py b/tests/system/test_fim/test_synchronization/test_synchronization.py index 21395031e7..c823a0356e 100644 --- a/tests/system/test_fim/test_synchronization/test_synchronization.py +++ 
b/tests/system/test_fim/test_synchronization/test_synchronization.py @@ -46,7 +46,7 @@ from time import sleep -from wazuh_testing.tools.monitoring import HostMonitor +from wazuh_testing.tools.system_monitoring import HostMonitor from wazuh_testing.tools.system import HostManager, clean_environment from wazuh_testing.tools import WAZUH_LOGS_PATH from wazuh_testing.fim import create_folder_file, query_db From 75970365262a9974a3fe46616fe179694bfec7cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 6 Feb 2024 10:11:18 +0000 Subject: [PATCH 172/174] docs: include 4917 to the current changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 317e4d0efc..318501d87a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,7 +37,7 @@ All notable changes to this project will be documented in this file. ### Fixed -- Fixed ansible_runner import conditional to avoid errors on Windows and python 3.6 ([#4916](https://github.com/wazuh/wazuh-qa/pull/4916)) \- (Framework) +- Fixed ansible and ansible_runner import conditional to avoid errors on Windows and python 3.6 ([#4916](https://github.com/wazuh/wazuh-qa/pull/4916), [#4917](https://github.com/wazuh/wazuh-qa/pull/4917/)) \- (Framework) - Fixed IT control_service Windows loop ([#4765](https://github.com/wazuh/wazuh-qa/pull/4765)) \- (Framework) - Fix macOS agents provision to enable registration and connection with managers. ([#4770](https://github.com/wazuh/wazuh-qa/pull/4770/)) \- (Framework) - Fix hardcoded python interpreter in qa_framework role. 
([#4658](https://github.com/wazuh/wazuh-qa/pull/4658)) \- (Framework) From 40a4658702bde429a9c096f5fee92d21eb595db8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 6 Feb 2024 10:17:21 +0000 Subject: [PATCH 173/174] docs: split 4917 and 4916 changelog lines --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 318501d87a..170551294b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -37,7 +37,8 @@ All notable changes to this project will be documented in this file. ### Fixed -- Fixed ansible and ansible_runner import conditional to avoid errors on Windows and python 3.6 ([#4916](https://github.com/wazuh/wazuh-qa/pull/4916), [#4917](https://github.com/wazuh/wazuh-qa/pull/4917/)) \- (Framework) +- Migrate HostMonitor to system_monitoring to avoid Windows import of ansible module ([#4917](https://github.com/wazuh/wazuh-qa/pull/4917/)) \- (Framework) +- Fixed ansible_runner import conditional to avoid errors on Windows and python 3.6 ([#4916](https://github.com/wazuh/wazuh-qa/pull/4916)) \- (Framework) - Fixed IT control_service Windows loop ([#4765](https://github.com/wazuh/wazuh-qa/pull/4765)) \- (Framework) - Fix macOS agents provision to enable registration and connection with managers. ([#4770](https://github.com/wazuh/wazuh-qa/pull/4770/)) \- (Framework) - Fix hardcoded python interpreter in qa_framework role. ([#4658](https://github.com/wazuh/wazuh-qa/pull/4658)) \- (Framework) From 34352792bcaf7ada974fd843b2e66fda7ffb7cba Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 7 Feb 2024 15:27:09 +0100 Subject: [PATCH 174/174] docs: update changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a88062c9d1..7cca641be2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,10 @@ All notable changes to this project will be documented in this file. 
- Removed configobj library from requirements.txt ([#4803](https://github.com/wazuh/wazuh-qa/pull/4803)) \- (Framework) - Updated integration tests README ([#4742](https://github.com/wazuh/wazuh-qa/pull/4742)) \- (Framework) +### Fixed + +- Fix manager_agent system tests environment ([#4808](https://github.com/wazuh/wazuh-qa/pull/4808)) \- (Framework) + ## [4.8.0] - TBD ### Added