From 940fb60fdb5d5a0421cdc5fc3f8276f85a97e6f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=ADctor=20Rebollo=20P=C3=A9rez?= Date: Tue, 23 Jan 2024 09:00:21 +0000 Subject: [PATCH] refac: remote operations in install/remote/update package functions --- .../end_to_end/remote_operations_handler.py | 359 +++++++++++++++--- .../end_to_end/vulnerability_detector.py | 145 +++---- .../vuln_packages.json | 108 ++++++ .../wazuh_testing/tools/system.py | 7 + .../cases/test_vulnerability.yaml | 147 ++++--- .../test_vulnerability_detector/conftest.py | 33 +- .../test_vulnerability_detector.py | 46 ++- 7 files changed, 633 insertions(+), 212 deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index 50ab7353fa..31e474e4df 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -20,15 +20,15 @@ This program is a free software; you can redistribute it and/or modify it under the terms of GPLv2 """ -import re +import os +import json +import logging from typing import Dict, List from multiprocessing.pool import ThreadPool -from datetime import datetime, timezone -import logging +from datetime import datetime +from concurrent.futures import ThreadPoolExecutor -from wazuh_testing.end_to_end.indexer_api import get_indexer_values from wazuh_testing.tools.system import HostManager -from wazuh_testing.end_to_end.wazuh_api import get_agents_vulnerabilities from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost from wazuh_testing.end_to_end.waiters import wait_until_vuln_scan_agents_finished from wazuh_testing.end_to_end.regex import get_event_regex @@ -36,10 +36,22 @@ from wazuh_testing.end_to_end.vulnerability_detector import check_vuln_alert_indexer, check_vuln_state_index -def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_manager: HostManager, - current_datetime: str = None): +def load_packages_metadata(): + """ + Load packages metadata from the packages.json file. """ - Launch a remote operation on the specified host. + packages_filepath = os.path.join(os.path.dirname(__file__), + 'vulnerability_detector_packages', 'vuln_packages.json') + + with open(packages_filepath, 'r') as packages_file: + packages_data = json.load(packages_file) + + return packages_data + + +def install_package(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): + """ + Install a package on the specified host. Args: host (str): The target host on which to perform the operation. @@ -49,33 +61,127 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man Raises: ValueError: If the specified operation is not recognized. 
""" - logging.critical(f"Launching remote operation: {operation_data}") - + logging.critical(f"Installing package on {host}") host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] host_os_arch = host_manager.get_host_variables(host)['architecture'] system = host_manager.get_host_variables(host)['os_name'] - operation = operation_data['operation'] - if system == 'linux': system = host_manager.get_host_variables(host)['os'].split('_')[0] - if operation == 'install_package': - logging.critical(f"Installing package on {host}") + install_package_data = operation_data['package'] + package_id = None - package_data = operation_data['package'] - package_url = package_data[host_os_name][host_os_arch] - - if isinstance(package_url, list): - for package in package_url: - host_manager.install_package(host, package, system) + if host_os_name in install_package_data: + if host_os_arch in install_package_data[host_os_name]: + package_id = install_package_data[host_os_name][host_os_arch] else: - host_manager.install_package(host, package_url, system) + raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") - logging.critical(f"Package installed on {host}") - logging.critical(f"Waiting for syscollector scan to finish on {host}") + package_data = load_packages_metadata()[package_id] + package_url = package_data['urls'][host_os_name][host_os_arch] + logging.critical(f"Installing package on {host}") + logging.critical(f"Package URL: {package_url}") + + current_datetime = datetime.utcnow().isoformat() + host_manager.install_package(host, package_url, system) + + logging.critical(f"Package installed on {host}") + + if operation_data['check']['alerts'] or operation_data['check']['state_index']: + logging.critical(f"Waiting for syscollector scan to finish on {host}") TIMEOUT_SYSCOLLECTOR_SCAN = 80 + truncate_remote_host_group_files(host_manager, 'agent', 'logs') + + # Wait until syscollector + monitoring_data = generate_monitoring_logs(host_manager, + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], + host_manager.get_group_hosts('agent')) + + result = monitoring_events_multihost(host_manager, monitoring_data) + + logging.critical(f"Syscollector scan finished with result: {result}") + + truncate_remote_host_group_files(host_manager, 'manager', 'logs') + + logging.critical(f"Waiting for vulnerability scan to finish on {host}") + + wait_until_vuln_scan_agents_finished(host_manager) + + logging.critical(f"Checking agent vulnerability on {host}") + + results = { + 'evidences': { + "alerts_not_found": [], + "states_not_found": [] + }, + 'checks': {} + } + + if 'check' in operation_data: + if operation_data['check']['alerts']: + logging.critical(f'Checking vulnerability alerts in the indexer for {host}') + results["alerts_not_found"] = check_vuln_alert_indexer(host_manager, host, package_data, + current_datetime) + + if operation_data['check']['state_index']: + logging.critical(f'Checking vulnerability state index for {host}') + results["states_not_found"] = check_vuln_state_index(host_manager, host, package_data, + current_datetime) + + logging.critical(f"Results: {results}") + + if results['alerts_not_found'] or results['states_not_found']: + results['checks']['all_successfull'] = False + else: + results['checks']['all_successfull'] = True + + return { + f"{host}": results + } + + +def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: 
HostManager): + """ + Install a package on the specified host. + + Args: + host (str): The target host on which to perform the operation. + operation_data (dict): Dictionary containing operation details. + host_manager (HostManager): An instance of the HostManager class containing information about hosts. + + Raises: + ValueError: If the specified operation is not recognized. + """ + logging.critical(f"Removing package on {host}") + host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(host)['architecture'] + system = host_manager.get_host_variables(host)['os_name'] + if system == 'linux': + system = host_manager.get_host_variables(host)['os'].split('_')[0] + + package_data = operation_data['package'] + package_id = None + + if host_os_name in package_data: + if host_os_arch in package_data[host_os_name]: + package_id = package_data[host_os_name][host_os_arch] + else: + raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + + package_data = load_packages_metadata()[package_id] + logging.critical(f"Removing package on {host}") + uninstall_name = package_data['uninstall_name'] + + current_datetime = datetime.utcnow().isoformat() + host_manager.remove_package(host, uninstall_name, system) + + if operation_data['check']['alerts'] or operation_data['check']['state_index']: + logging.critical(f"Waiting for syscollector scan to finish on {host}") + TIMEOUT_SYSCOLLECTOR_SCAN = 80 truncate_remote_host_group_files(host_manager, 'agent', 'logs') # Wait until syscollector @@ -93,49 +199,167 @@ def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_man logging.critical(f"Waiting for vulnerability scan to finish on {host}") - # Wait until VD scan wait_until_vuln_scan_agents_finished(host_manager) - elif operation == 'remove_package': - logging.critical(f"Removing package on {host}") - package_data = operation_data['package'] - package_name = package_data[host_os_name][host_os_arch] - host_manager.remove_package(host, package_name, system) + logging.critical(f"Checking agent vulnerability on {host}") + + results = { + 'evidences': { + "alerts_not_found": [], + "states_found": [] + }, + 'checks': {} + } + + logging.critical("Operation data is: {}".format(package_data)) + + if 'check' in operation_data: + if operation_data['check']['alerts'] or operation_data['check']['states']: + if operation_data['check']['alerts']: + logging.critical(f'Checking vulnerability alerts in the indexer for {host}') + results["evidences"]["alerts_not_found"] = check_vuln_alert_indexer(host_manager, host, package_data, + current_datetime, + vuln_mitigated=True) + + if operation_data['check']['state_index']: + logging.critical(f'Checking vulnerability state index for {host}') + states_not_found = check_vuln_state_index(host_manager, host, package_data, + current_datetime, return_found=True) + + results['evidences']["states_found"] = states_not_found + + if results['evidences']['alerts_not_found'] or len(results['evidences']['states_found']) > 0: + results['checks']['all_successfull'] = False + else: + results['checks']['all_successfull'] = True + + return { + f"{host}": results + } + + +def update_package(host: str, operation_data: Dict[str, Dict], host_manager: HostManager): + """ + Install a package on the specified host. + + Args: + host (str): The target host on which to perform the operation. + operation_data (dict): Dictionary containing operation details. 
+ host_manager (HostManager): An instance of the HostManager class containing information about hosts. + + Raises: + ValueError: If the specified operation is not recognized. + """ + logging.critical(f"Updating package on {host}") + + host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0] + host_os_arch = host_manager.get_host_variables(host)['architecture'] + system = host_manager.get_host_variables(host)['os_name'] + if system == 'linux': + system = host_manager.get_host_variables(host)['os'].split('_')[0] + + install_package_data_from = operation_data['package']['from'] + install_package_data_to= operation_data['package']['to'] - TIMEOUT_SYSCOLLECTOR_SCAN = 60 + package_id_from = None + package_id_to = None + if host_os_name in install_package_data_from: + if host_os_arch in install_package_data_from[host_os_name]: + package_id_from = install_package_data_from[host_os_name][host_os_arch] + else: + raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + + if host_os_name in install_package_data_to: + if host_os_arch in install_package_data_to[host_os_name]: + package_id_to = install_package_data_to[host_os_name][host_os_arch] + else: + raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + + package_data_from = load_packages_metadata()[package_id_from] + package_data_to = load_packages_metadata()[package_id_to] + + package_url_from = package_data_from['urls'][host_os_name][host_os_arch] + package_url_to = package_data_to['urls'][host_os_name][host_os_arch] + + logging.critical(f"Installing package on {host}") + logging.critical(f"Package URL: {package_url_to}") + + current_datetime = datetime.utcnow().isoformat() + host_manager.install_package(host, package_url_to, system) + + logging.critical(f"Package installed on {host}") + + if operation_data['check']['alerts'] or operation_data['check']['state_index']: + logging.critical(f"Waiting for syscollector scan to finish on {host}") + TIMEOUT_SYSCOLLECTOR_SCAN = 80 truncate_remote_host_group_files(host_manager, 'agent', 'logs') + # Wait until syscollector monitoring_data = generate_monitoring_logs(host_manager, - [get_event_regex({'event': 'syscollector_scan_start'}), - get_event_regex({'event': 'syscollector_scan_end'})], - [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], host_manager.get_group_hosts('agent')) + [get_event_regex({'event': 'syscollector_scan_start'}), + get_event_regex({'event': 'syscollector_scan_end'})], + [TIMEOUT_SYSCOLLECTOR_SCAN, TIMEOUT_SYSCOLLECTOR_SCAN], + host_manager.get_group_hosts('agent')) - monitoring_events_multihost(host_manager, monitoring_data) + result = monitoring_events_multihost(host_manager, monitoring_data) + + logging.critical(f"Syscollector scan finished with result: {result}") truncate_remote_host_group_files(host_manager, 'manager', 'logs') - # Wait until VD scan + logging.critical(f"Waiting for vulnerability scan to finish on {host}") + wait_until_vuln_scan_agents_finished(host_manager) - elif operation == 'check_agent_vulnerability': logging.critical(f"Checking agent vulnerability on {host}") results = { - "alerts_not_found": [], - "states_not_found": [] + 'evidences': { + "alerts_not_found_from": [], + "states_found_from": [], + "alerts_not_found_to": [], + "states_not_found_to": [], + }, + 'checks': {} } - if operation_data['parameters']['alert_indexed']: - logging.critical(f'Checking vulnerability alerts in the indexer for {host}') - results["alerts_not_found"] = check_vuln_alert_indexer(host_manager, 
operation_data['vulnerability_data'], current_datetime)
+    if 'check' in operation_data:
+        if operation_data['check']['alerts']:
+            logging.critical(f'Checking vulnerability alerts in the indexer for {host}. Expected CVE mitigation')
+            results["evidences"]["alerts_not_found_from"] = check_vuln_alert_indexer(host_manager, host,
+                                                                                     package_data_from,
+                                                                                     current_datetime,
+                                                                                     vuln_mitigated=True)
+
+        if operation_data['check']['state_index']:
+            logging.critical(f'Checking vulnerability state index for {host}')
+            states_found = check_vuln_state_index(host_manager, host, package_data_from,
+                                                  current_datetime, return_found=True)
+            results['evidences']["states_found_from"] = states_found
+
+        logging.critical(f'Checking vulnerability alerts in the indexer for {host}. Expected CVE vuln of new package version')
+
+        if operation_data['check']['alerts']:
+            logging.critical(f'Checking vulnerability alerts in the indexer for {host}')
+            results["evidences"]["alerts_not_found_to"] = check_vuln_alert_indexer(host_manager, host, package_data_to,
+                                                                                   current_datetime)
+
+        if operation_data['check']['state_index']:
+            logging.critical(f'Checking vulnerability state index for {host}')
+            results["evidences"]["states_not_found_to"] = check_vuln_state_index(host_manager, host, package_data_to,
+                                                                                 current_datetime)
 
-        if operation_data['parameters']['state_indice']:
-            logging.critical(f'Checking vulnerability state index for {host}')
-            results["states_not_found"] = check_vuln_state_index(host_manager, operation_data['vulnerability_data'], current_datetime)
+    logging.critical(f"Results: {results}")
 
-        assert len(results["alerts_not_found"]) == 0 and len(results["states_not_found"]) == 0, \
-            f"Vulnerability alerts or states not found for {host}: {results}"
+    if results['evidences']['alerts_not_found_from'] or len(results['evidences']['states_found_from']) > 0 or \
+            results['evidences']['alerts_not_found_to'] or results['evidences']['states_not_found_to']:
+        results['checks']['all_successfull'] = False
+    else:
+        results['checks']['all_successfull'] = True
+
+    return {
+        f"{host}": results
+    }
 
 
 def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict], host_manager: HostManager):
@@ -152,10 +376,21 @@ def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict
 
     if task_list:
         for task in task_list:
-            launch_remote_operation(agent, task, host_manager, timestamp)
+            launch_remote_operation(agent, task, host_manager)
+
+
+def launch_remote_operation(host: str, operation_data: Dict[str, Dict], host_manager: HostManager):
+    operation = operation_data['operation']
+    if operation in globals():
+        operation_result = globals()[operation](host, operation_data, host_manager)
+        return operation_result
+    else:
+        raise ValueError(f"Operation {operation} not recognized")
 
 
-def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager):
+def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager, target_to_ignore: list = None):
     """
     Launch parallel remote operations on multiple hosts.
 
@@ -163,14 +398,24 @@ def launch_parallel_operations(task_list: List[Dict], host_manager: HostManager)
         task_list (list): List of dictionaries containing operation details.
         host_manager (HostManager): An instance of the HostManager class containing information about hosts.
+        target_to_ignore (list, optional): Hosts to exclude from the operations; they are reported as failed.
     """
-    for task in task_list:
-        logging.critical(f"Launching parallel task: {task}")
-        parallel_configuration = []
-        target = task['target']
+    results = {}
+    target_to_ignore = target_to_ignore or []
+
+    # Hosts excluded from the run (for example, because their preconditions failed) are reported as failed
+    for target in target_to_ignore:
+        results[target] = {'checks': {'all_successfull': False}, 'evidences': {}}
+
+    def launch_and_store_result(args):
+        host, task, manager = args
+        result = launch_remote_operation(host, task, manager)
+        results.update(result)
+
+    with ThreadPoolExecutor() as executor:
+        # Submit one task per (host, task) pair, skipping the ignored hosts
+        futures = [executor.submit(launch_and_store_result, (host, task, host_manager))
+                   for task in task_list
+                   for host in host_manager.get_group_hosts(task['target'])
+                   if host not in target_to_ignore]
 
-        for host in host_manager.get_group_hosts(target):
-            parallel_configuration.append((host, task, host_manager))
+        # Wait for all tasks to complete
+        for future in futures:
+            future.result()
 
-    with ThreadPool() as pool:
-        # Use the pool to map the function to the list of hosts
-        pool.starmap(launch_remote_operation, parallel_configuration)
+    return results
diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py
index 4d428100bf..a124aee7ab 100644
--- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py
+++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py
@@ -6,7 +6,8 @@
 import re
 
 
-def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[str, Dict], current_datetime: str = None):
+def check_vuln_state_index(host_manager: HostManager, host: str, package: Dict[str, Dict],
+                           current_datetime: str = "", return_found: bool = False):
     """
     Check vulnerability state index for a host.
 
@@ -20,49 +21,35 @@ def check_vuln_state_index(host_manager: HostManager, vulnerability_data: Dict[s
     index_vuln_state_content = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities',
                                                   greater_than_timestamp=current_datetime)['hits']['hits']
     expected_alerts_not_found = []
+    expected_alerts_found = []
 
-    logging.critical(f"Checking vulnerability state index {vulnerability_data}")
+    logging.critical(f"Checking vulnerability state index {package}")
+    vulnerabilities = package['CVE']
 
-    for agent in host_manager.get_group_hosts('agent'):
-        logging.critical(f"Checking vulnerability state index for {agent}")
+    for vulnerability in vulnerabilities:
+        found = False
+        for indice_vuln in 
index_vuln_state_content: - logging.critical(f"Indice vuln: {indice_vuln}") - - state_agent = indice_vuln['_source']['agent']['name'] - state_cve = indice_vuln["_source"]['vulnerability']['id'] - state_package_name = indice_vuln['_source']['package']['name'] - state_package_version = indice_vuln['_source']['package']['version'] - - if state_agent == agent and state_cve == vulnerability['CVE'] \ - and state_package_name == vulnerability['PACKAGE_NAME'] and \ - state_package_version == vulnerability['PACKAGE_VERSION']: - found = True - - if not found: - expected_alerts_not_found.append(vulnerability) + if not found: + expected_alerts_not_found.append(vulnerability) logging.critical(f"Expected alerts not found: {expected_alerts_not_found}") logging.critical(f"Triggered alerts: {index_vuln_state_content}") - return expected_alerts_not_found - + if return_found: + return expected_alerts_found + else: + return expected_alerts_not_found def get_alerts_by_agent(alerts, regex): @@ -84,9 +71,9 @@ def get_alerts_by_agent(alerts, regex): agent = alert['_source']['agent']['name'] if agent not in alerts_vuln_by_agent: alerts_vuln_by_agent[agent] = [] - else: - alerts_vuln_by_agent[agent].append(alert) + alerts_vuln_by_agent[agent].append(alert) + logging.critical(f"Alerts by agent: {alerts_vuln_by_agent}") return alerts_vuln_by_agent @@ -103,8 +90,8 @@ def get_indexed_vulnerabilities_by_agent(indexed_vulnerabilities): return vulnerabilities_by_agent -def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict[str, Dict], - current_datetime: str = ''): +def check_vuln_alert_indexer(host_manager: HostManager, host: str, package: Dict[str, Dict], + current_datetime: str = '', vuln_mitigated: bool = False): """ Check vulnerability alerts in the indexer for a host. @@ -115,57 +102,47 @@ def check_vuln_alert_indexer(host_manager: HostManager, vulnerability_data: Dict Returns: list: List of vulnerability alerts. 
""" - regex_cve_affects = "CVE.* affects .*" - regex_solved_vuln = "The .* that affected .* was solved due to a package removal" + logging.critical(f"Checking vulnerability alerts in the indexer {package}") - logging.critical(f"Checking vulnerability alerts in the indexer {vulnerability_data}") + regex_to_match = "CVE.* affects .*" + if vuln_mitigated: + regex_to_match = "The .* that affected .* was solved due to a package removal" indexer_alerts = get_indexer_values(host_manager, greater_than_timestamp=current_datetime)['hits']['hits'] - # Get CVE affects alerts for all agents - detected_vuln_alerts_by_agent = get_alerts_by_agent(indexer_alerts, regex_cve_affects) - solved_alerts_by_agent = get_alerts_by_agent(indexer_alerts, regex_solved_vuln) + alerts_global = get_alerts_by_agent(indexer_alerts, regex_to_match) + + if host in alerts_global: + triggered_alerts = alerts_global[host] + else: + triggered_alerts = [] + + logging.critical(f"Triggered alerts: {triggered_alerts}") - triggered_alerts = detected_vuln_alerts_by_agent expected_alerts_not_found = [] - if 'state' in vulnerability_data and not vulnerability_data['state']: - triggered_alerts = solved_alerts_by_agent - - for agent in host_manager.get_group_hosts('agent'): - logging.critical(f"Checking vulnerability alerts for {agent}") - host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0] - host_os_arch = host_manager.get_host_variables(agent)['architecture'] - logging.critical(f"Host OS name: {host_os_name}") - logging.critical(f"Host OS arch: {host_os_arch}") - logging.critical(f"Check1: {host_os_arch in vulnerability_data}") - logging.critical(f"Check2: {host_os_name in vulnerability_data}") - - if host_os_name in vulnerability_data: - if host_os_arch in vulnerability_data[host_os_name]: - logging.critical(f"Inside Host OS arch: {host_os_arch}") - vulnerabilities = vulnerability_data[host_os_name][host_os_arch] - for vulnerability in vulnerabilities: - - logging.critical(f"Checking vulnerability: {vulnerability}") - - cve = vulnerability['CVE'] - package = vulnerability['PACKAGE_NAME'] - version = vulnerability['PACKAGE_VERSION'] - found = False - for triggered_alert in triggered_alerts[agent]: - alert_package_name = triggered_alert['_source']['data']['vulnerability']['package']["name"] - alert_package_version = \ - triggered_alert['_source']['data']['vulnerability']['package']['version'] - alert_cve = triggered_alert['_source']['data']['vulnerability']['cve'] - - if alert_cve == cve and alert_package_name == package and \ - alert_package_version == version: - found = True - - if not found: - print(f"Vulnerability not found: {vulnerability}") - expected_alerts_not_found.append(vulnerability) + for cve in package['CVE']: + logging.critical(f"Checking vulnerability: {cve}") + + package_name = package['package_name'] + package_version = package['package_version'] + + found = False + + for triggered_alert in triggered_alerts: + alert_package_name = triggered_alert['_source']['data']['vulnerability']['package']["name"] + alert_package_version = \ + triggered_alert['_source']['data']['vulnerability']['package']['version'] + alert_cve = triggered_alert['_source']['data']['vulnerability']['cve'] + + if alert_cve == cve and alert_package_name == package_name and \ + alert_package_version == package_version: + found = True + + if not found: + print(f"Vulnerability not found: {cve} for package {package} {package_version}") + expected_alerts_not_found.append({'CVE': cve, 'PACKAGE_NAME': package_name, + 'PACKAGE_VERSION': 
package_version})
 
     logging.critical(f"Expected alerts not found: {expected_alerts_not_found}")
     logging.critical(f"Triggered alerts: {triggered_alerts}")
diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json
new file mode 100644
index 0000000000..faefd51aa2
--- /dev/null
+++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json
@@ -0,0 +1,108 @@
+{
+    "nmap-6.46": {
+        "package_name": "nmap",
+        "package_version": "6.46-1",
+        "CVE": ["CVE-2018-15173"],
+        "urls": {
+            "centos": {
+                "amd64": "https://nmap.org/dist/nmap-6.46-1.x86_64.rpm"
+            }
+        },
+        "uninstall_name": "nmap*"
+    },
+    "grafana-8.5.5": {
+        "package_name": "grafana",
+        "package_version": "8.5.5",
+        "CVE": ["CVE-2023-2183"],
+        "urls": {
+            "centos": {
+                "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm",
+                "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm"
+            }
+        },
+        "uninstall_name": "grafana*"
+    },
+    "vlc-3.0.6": {
+        "package_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe",
+        "package_version": "3.0.6",
+        "CVE": ["CVE-2019-12874"],
+        "urls": {
+            "windows": {
+                "amd64": "https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe"
+            }
+        },
+        "uninstall_name": "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe"
+    },
+    "node-17.0.1": {
+        "package_name": "node",
+        "package_version": "17.0.1",
+        "CVE": ["CVE-2022-21824"],
+        "urls": {
+            "macos": {
+                "amd64": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg",
+                "arm64v8": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg"
+            }
+        },
+        "uninstall_name": "node*"
+    },
+    "lynx-2.8.8": {
+        "package_name": "lynx",
+        "package_version": "2.8.8-0.3.dev15.el7",
+        "CVE": ["CVE-2021-38165"],
+        "urls": {
+            "centos": {
+                "amd64": "https://download.cf.centos.org/centos/7/os/x86_64/Packages/lynx-2.8.8-0.3.dev15.el7.x86_64.rpm"
+            }
+        },
+        "uninstall_name": "lynx*"
+    },
+    "firefox-78.9.0": {
+        "package_name": "firefox",
+        "package_version": "78.9.0-1.el7.centos",
+        "CVE": ["CVE-2023-6873", "CVE-2023-6872", "CVE-2022-38478"],
+        "urls": {
+            "centos": {
+                "amd64": "https://download.cf.centos.org/centos/7/updates/x86_64/Packages/firefox-78.9.0-1.el7.centos.x86_64.rpm"
+            }
+        },
+        "uninstall_name": "firefox*"
+    },
+    "firefox-91.13.0": {
+        "package_name": "firefox",
+        "package_version": "91.13.0-1.el7.centos",
+        "CVE": ["CVE-2023-6873", "CVE-2023-6872"],
+        "urls": {
+            "centos": {
+                "amd64": "https://download.cf.centos.org/centos/7/updates/x86_64/Packages/firefox-91.13.0-1.el7.centos.x86_64.rpm"
+            }
+        },
+        "uninstall_name": "firefox*"
+    }
+}
diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py
index a1719b0abd..02f4ea38fa 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/system.py
+++ 
b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -553,14 +553,21 @@ def remove_package(self, host, package_name, system): Example: host_manager.remove_package('my_host', 'my_package', system='ubuntu') """ + logging.critical(f"Removing package {package_name} from {host}") + logging.critical(f"System: {system}") + logging.critical(f"Host variables: {self.get_host_variables(host)}") + result = False + os_name = self.get_host_variables(host)['os_name'] if os_name == 'windows': result = self.get_host(host).ansible("win_command", f"& '{package_name}' /S", check=False) elif os_name == 'linux': os = self.get_host_variables(host)['os'].split('_')[0] if os == 'centos': + logging.critical(f"Centos!") result = self.get_host(host).ansible("yum", f"name={package_name} state=absent", check=False) + logging.critical(f"Result: {result}") elif os == 'ubuntu': result = self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False) diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 20d7cbbab6..3d3aabcd65 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -1,66 +1,103 @@ -- case: "Installation of a vulnerable package" - id: "install_package" - description: "Installation of a vulnerable package" - preconditions: null - body: - tasks: +#- case: "Installation of a vulnerable package" +# id: "install_package" +# description: "Installation of a vulnerable package" +# preconditions: null +# body: +# tasks: +# - operation: install_package +# target: agent +# check: +# alerts: True +# state_index: True +# package: +# centos: +# # amd64: nmap-6.46 +# amd64: lynx-2.8.8 +# arm64v8: grafana-enterprise-8.5.5 +# ubuntu: +# amd64: grafana-enterprise-8.5.5 +# windows: +# amd64: vlc-3.0.6 +# macos: +# amd64: node-v17.0.1 + +- case: "Upgrade" + id: "upgrade_package" + description: "Upgrade of a vulnerable package" + preconditions: + tasks: - operation: install_package target: agent + check: + alerts: True + state_index: True package: centos: - # amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm - amd64: https://nmap.org/dist/nmap-6.46-1.x86_64.rpm - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm + amd64: firefox-78.9.0 + arm64v8: grafana-enterprise-8.5.5 ubuntu: - amd64: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb - arm64v8: https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb + amd64: grafana-enterprise-8.5.5 windows: - amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe + amd64: vlc-3.0.6 macos: - amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - arm64v8: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg - - - operation: check_agent_vulnerability + amd64: node-v17.0.1 + arm64v8: node-v17.0.1 + body: + tasks: + - operation: update_package target: agent - parameters: - alert_indexed: True - state_indice: True - vulnerability_data: - centos: - amd64: - # - PACKAGE_NAME: "grafana" - # PACKAGE_VERSION: "8.5.5" - # CVE: CVE-2023-2183 - - PACKAGE_NAME: "nmap" - PACKAGE_VERSION: "6.46-1" - CVE: CVE-2018-15173 - arm64v8: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - ubuntu: - amd64: - - PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - arm64v8: - - 
PACKAGE_NAME: "grafana" - PACKAGE_VERSION: "8.5.5" - CVE: CVE-2023-2183 - windows: - amd64: - - PACKAGE_NAME: "C:\\Program Files\\VideoLAN\\VLC\\uninstall.exe" - PACKAGE_VERSION: "3.0.6" - CVE: CVE-2019-12874 - macos: - amd64: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: CVE-2022-21824 - arm64v8: - - PACKAGE_NAME: "node" - PACKAGE_VERSION: "17.0.1" - CVE: CVE-2022-21824 + check: + alerts: True + state_index: True + package: + from: + centos: + amd64: firefox-78.9.0 + arm64v8: grafana-enterprise-8.5.5 + ubuntu: + amd64: grafana-enterprise-8.5.5 + windows: + amd64: vlc-3.0.6 + macos: + amd64: node-v17.0.1 + arm64v8: node-v17.0.1 + to: + centos: + # amd64: nmap-6.46 + amd64: firefox-91.13.0 + arm64v8: grafana-enterprise-8.5.5 + ubuntu: + amd64: grafana-enterprise-8.5.5 + windows: + amd64: vlc-3.0.6 + macos: + amd64: node-v17.0.1 + arm64v8: node-v17.0.1 + + +# - case: "Removal of a vulnerable package" +# id: "remove_package" +# description: "Remove of a vulnerable package" +# preconditions: null +# body: +# tasks: +# - operation: remove_package +# target: agent +# check: +# alerts: True +# state_index: True +# package: +# centos: +# # amd64: nmap-6.46 +# amd64: lynx-2.8.8 +# arm64v8: grafana-enterprise-8.5.5 +# ubuntu: +# amd64: grafana-enterprise-8.5.5 +# windows: +# amd64: vlc-3.0.6 +# macos: +# amd64: node-v17.0.1 +# arm64v8: node-v17.0.1 # - case: "Upgrade of a vulnerable package: Remain vulnerable" diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index 4893a92fef..07e48f993a 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -50,6 +50,8 @@ def collect_evidences(test_name, host_manager, evidences) -> None: current_dir = os.path.dirname(__file__) vulnerability_detector_logs_dir = os.path.join(current_dir, "logs") tests_evidences_directory = os.path.join(str(vulnerability_detector_logs_dir), str(test_name)) + logging.critical(f"Collecting evidences for {test_name}") + logging.critical(evidences) if evidences: logging.info(f"Collecting custom evidences for {test_name}") @@ -105,14 +107,30 @@ def setup(preconditions, teardown, host_manager): """ """ if preconditions: - print("Configuyring preconditions") - launch_parallel_operations(preconditions['tasks'], host_manager) + result = launch_parallel_operations(preconditions['tasks'], host_manager) + + + for host in result.keys(): + if result[host]['checks']['all_successfull'] is False: + success_for_all_agents = False + logging.critical(f"Test failed for host {host}. Check logs for more information") + logging.critical(f"Evidences: {result[host]['evidences']}") - yield + + yield result if teardown: - print("Configuring teardonw") - launch_parallel_operations(teardown, host_manager) + result = launch_parallel_operations(teardown, host_manager) + + for host in result.keys(): + if result[host]['checks']['all_successfull'] is False: + success_for_all_agents = False + logging.critical(f"Test failed for host {host}. 
Check logs for more information")
+                logging.critical(f"Evidences: {result[host]['evidences']}")
+
 
 
 @pytest.fixture(scope='session', autouse=True)
@@ -122,7 +140,7 @@ def handle_logs():
     os.makedirs(logs_dir, exist_ok=True)
 
     yield
-    
+
     shutil.rmtree(logs_dir, ignore_errors=True)
 
 
@@ -235,7 +253,8 @@ def pytest_runtest_makereport(item, call):
     if 'host_manager' in item.funcargs:
         evidences = None
         if 'get_results' in item.funcargs:
-            evidences = item.funcargs['get_results']
+            test_result = item.funcargs['get_results']
+            evidences = test_result.get_evidences() if hasattr(test_result, 'get_evidences') else test_result
 
         collect_evidences(item._request.node.name, item.funcargs['host_manager'], evidences)
 
diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py
index 1f66bd71b8..6cb8127a66 100644
--- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py
+++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py
@@ -136,6 +136,9 @@ def __init__(self):
             }
         }
 
+    def get_evidences(self):
+        return self.evidences
+
     def summary(self):
         """Print a summary of the results of the tests"""
         if any(self.checks.values()):
@@ -598,7 +601,7 @@ def tests_syscollector_first_second_scan_consistency_alerts(self, host_manager,
         if not results.checks['vulnerabilities_equal_between_scans']:
             pytest.fail("Test failed. Check logs for more information")
 
-    def tests_syscollector_first_second_scan_consistency_index(self, host_manager, setup_vulnerability_tests, 
+    def tests_syscollector_first_second_scan_consistency_index(self, host_manager, setup_vulnerability_tests,
                                                                 get_results):
         results = get_results
         test_name = 'tests_syscollector_first_second_scan_consistency_index'
@@ -644,13 +647,38 @@ def tests_syscollector_first_second_scan_consistency_index(self, host_manager, s
                       depends=case['depends']) for case in cases]
 list_ids = [case['id'] for case in cases]
 
+
+class TestScanSyscollectorCases():
+    results = {}
+
+    @pytest.fixture(scope='class')
+    def get_results(self):
+        return self.results
+
+    @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids)
+    def test_vulnerability_detector_scans_cases(self, setup_vulnerability_tests, preconditions, body, teardown,
+                                                setup, host_manager, get_results):
+        setup_results = setup
+        results = get_results
+
+        logger.critical("Starting scan cases tests")
+        logger.critical(f"Case Info: {body}")
+
+        # Hosts whose preconditions failed are excluded from the case tasks and reported as failed
+        hosts_to_ignore = [host for host, host_results in setup_results.items()
+                           if host_results['checks']['all_successfull'] is False]
+
+        # Launch tests tasks
+        result = launch_parallel_operations(body['tasks'], host_manager, hosts_to_ignore)
+
+        # Store the case results so the class-scoped fixture exposes them for evidence collection
+        results.update(result)
+
+        success_for_all_agents = True
 
-@pytest.mark.dependency()
-@pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids)
-#def test_vulnerability_detector_scans_cases(setup_vulnerability_tests, preconditions, body, teardown, setup, host_manager):
-def test_vulnerability_detector_scans_cases(preconditions, body, teardown, setup, host_manager):
-    logger.critical("Starting scan cases tests")
-    logger.critical(f"Case Info: {body}")
+        for host in result.keys():
+            if result[host]['checks']['all_successfull'] is False:
+                success_for_all_agents = False
+                logger.critical(f"Test failed for host {host}. 
Check logs for more information") + logger.critical(f"Evidences: {result[host]['evidences']}") - # Launch tests tasks - launch_parallel_operations(body['tasks'], host_manager) + assert success_for_all_agents is True, "Test failed. Check logs for more information"