From a404dee7c08d24c9d03bc9691f7db3d6800a0f52 Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Fri, 16 Feb 2024 16:34:45 +0100 Subject: [PATCH 01/20] add params to wazuh_statistics --- .../wazuh_testing/scripts/wazuh_statistics.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py index 52fc31308c..034389bf9b 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py @@ -26,13 +26,16 @@ def get_script_arguments(): formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('-t', '--target', dest='target_list', required=True, type=str, nargs='+', action='store', help='Type the statistics target to collect separated by whitespace. ' - 'Targets: agent, logcollector, remote and analysis.') + 'Targets: agent, logcollector, remoted, analysis-events and analysisd-state.') parser.add_argument('-s', '--sleep', dest='sleep_time', type=float, default=5, action='store', help='Type the time in seconds between each entry.') parser.add_argument('-d', '--debug', dest='debug', action='store_true', default=False, help='Enable debug level logging.') parser.add_argument('--store', dest='store_path', action='store', default=gettempdir(), help=f"Path to store the CSVs with the data. Default {gettempdir()}.") + parser.add_argument('-a', '--use_api', dest='use_api', type=bool, action='store', default=False, + help="Determine if the API should be used to collect the data. Default False." + "For remoted set to True to get data from API. analysis_events uses API by default.") return parser.parse_args() @@ -51,10 +54,10 @@ def main(): logger.info(f'Started new session: {CURRENT_SESSION}') for target in options.target_list: - monitor = StatisticMonitor(target=target, time_step=options.sleep_time, dst_dir=options.store_path) - monitor.start() + monitor = StatisticMonitor(target=target, time_step=options.sleep_time, dst_dir=options.store_path, use_api=options.use_api) MONITOR_LIST.append(monitor) + monitor.start() if __name__ == '__main__': - main() + main() \ No newline at end of file From 197f6e5fab3b406d4ae71b066f3800da7cf033a1 Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Fri, 16 Feb 2024 16:35:04 +0100 Subject: [PATCH 02/20] add api support to statistic.py --- .../tools/performance/statistic.py | 478 +++++++++++++++++- 1 file changed, 455 insertions(+), 23 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py index ce1f75890c..da6bad3323 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py @@ -1,3 +1,6 @@ + + + # Copyright (C) 2015-2021, Wazuh Inc. # Created by Wazuh, Inc. . 
# This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 @@ -5,17 +8,23 @@ import csv import json import logging +import requests +import urllib3 +import wazuh_testing.tools as tls from datetime import datetime from os.path import basename, isfile, join, splitext from re import sub from tempfile import gettempdir from threading import Thread, Event -from time import sleep +from time import sleep, time -import wazuh_testing.tools as tls +urllib3.disable_warnings() + +API_URL="https://localhost:55000" +DAEMONS_ENDPOINT="/manager/daemons/stats?daemons_list=wazuh-analysisd,wazuh-remoted,wazuh-db" +TOKEN_ENDPOINT="/security/user/authenticate" logger = logging.getLogger('wazuh-statistics-monitor') -logger.setLevel(logging.INFO) class StatisticMonitor: @@ -32,6 +41,7 @@ class StatisticMonitor: time_step (int): Time between intervals. target (str, optional): target file to monitor. dst_dir (str, optional): path to store the file. + use_api (bool, optional): Determine if the API should be used to collect the data. Default False. Attributes: event (thread.Event): thread Event used to control the scans. @@ -40,44 +50,56 @@ class StatisticMonitor: dst_dir (str): directory to store the CSVs. Defaults to temp directory. csv_file (str): path to the CSV file. target (str): target file to monitor. + parse_json (bool): Determine if the file is a JSON file. Default False. """ - def __init__(self, target='agent', time_step=5, dst_dir=gettempdir()): + def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_api=False): self.event = None self.thread = None self.time_step = time_step self.target = target self.dst_dir = dst_dir + self.use_api = use_api self.parse_json = False + if self.use_api == True and self.target != 'analysis_events' and self.target != 'remoted': + self.use_api = False + if self.target == 'agent': self.statistics_file = tls.AGENT_STATISTICS_FILE elif self.target == 'logcollector': self.statistics_file = tls.LOGCOLLECTOR_STATISTICS_FILE self.parse_json = True - elif self.target == 'remote': + elif self.target == 'remoted': self.statistics_file = tls.REMOTE_STATISTICS_FILE - elif self.target == 'analysis': + elif self.target == 'analysis_state': self.statistics_file = tls.ANALYSIS_STATISTICS_FILE + elif self.target == 'analysis_events': + self.use_api = True else: raise ValueError(f'The target {self.target} is not a valid one.') - state_file = splitext(basename(self.statistics_file))[0] - self.csv_file = join(self.dst_dir, f'{state_file}_stats.csv') + if self.use_api == False: + state_file = splitext(basename(self.statistics_file))[0] + self.csv_file = join(self.dst_dir, f'{state_file}_stats.csv') + else: + self.csv_file = join(self.dst_dir, f'wazuh-{self.target}_api_stats.csv') + - def _parse_classic_state_file(self, data): + def _parse_classic_state_file(self, data, target): """Parse the info from the .state files from Wazuh with shell compatible format. Args: data (dict): dictionary to store the data of the file. + target (string): specifies which CSV must be generated """ with open(self.statistics_file) as state_file: for line in state_file: if line.rstrip() and line.rstrip()[0] != '#': key, value = line.splitlines()[0].split('=') data[key] = value.split("'")[1] + self._write_csv(data, target, self.csv_file) - self._write_csv(data, self.csv_file) def _parse_logcollector_state_file(self, data): """Parse the info from the .state files from Wazuh with shell compatible format. 
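
A minimal usage sketch for the use_api mode this patch introduces (illustrative only, not part of the diff): it assumes the defaults hard-coded above, a local API at https://localhost:55000 with the default wazuh:wazuh credentials. The CLI equivalent would be wazuh_statistics.py -t analysis_events -a True.

    from time import sleep
    from wazuh_testing.tools.performance.statistic import StatisticMonitor

    # Collect analysisd event metrics through the API every 5 seconds.
    monitor = StatisticMonitor(target='analysis_events', time_step=5, use_api=True)
    monitor.start()      # spawns the monitoring thread; rows are appended to the CSV
    sleep(60)            # sample for one minute
    monitor.shutdown()   # stops the monitoring thread
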
@@ -87,7 +109,6 @@ def _parse_logcollector_state_file(self, data):
         """
         with open(self.statistics_file) as state_file:
             state_info = json.load(state_file)
-
             for file in state_info['global']['files']:
                 if isfile(file['location']):
                     csv_name = sub(r'\.', '_', basename(file['location']))
@@ -101,53 +122,464 @@ def _parse_logcollector_state_file(self, data):
                     file_data['bytes'] = file['bytes']
                     file_data['target'] = target['name']
                     file_data['target_drops'] = target['drops']
-                    self._write_csv(file_data, join(self.dst_dir, f'{csv_name}.csv'))
+                    self._write_csv(file_data, 'logcollector', join(self.dst_dir, f'{csv_name}.csv'))
+
 
     def _parse_state_file(self):
         """Read the data from the statistics file generated by Wazuh."""
         try:
             logging.info("Getting statistics data from {}".format(self.statistics_file))
-            data = {'Timestamp': datetime.now().strftime('%Y/%m/%d %H:%M:%S')}
+            data = {}
 
             if not self.parse_json:
-                self._parse_classic_state_file(data)
+                self._parse_classic_state_file(data, self.target)
             else:
                 self._parse_logcollector_state_file(data)
         except Exception as e:
             logger.error(f'Exception with {self.statistics_file} | {str(e)}')
 
-    @staticmethod
-    def _write_csv(data, csv_file):
+
+    def _parse_api_data(self):
+        """Read the statistics data for the target daemon from the Wazuh API."""
+
+        logging.info("Getting statistics data from API for {}".format(self.target))
+
+        response = requests.get(API_URL + TOKEN_ENDPOINT, verify=False, auth=requests.auth.HTTPBasicAuth("wazuh", "wazuh"))
+        if response.status_code != 200:
+            logging.info("Retrying get API data, status code {}".format(response.status_code))
+            return self._parse_api_data()
+
+        daemons_response = requests.get(API_URL + DAEMONS_ENDPOINT, verify=False, headers={'Authorization': 'Bearer ' + response.json()['data']['token']})
+        if daemons_response.status_code != 200:
+            logging.info("Retrying get API data, status code {}".format(daemons_response.status_code))
+            return self._parse_api_data()
+
+        data = daemons_response.json()['data']['affected_items']
+        self._write_csv(data, self.target, self.csv_file)
+
+
+    def _write_csv(self, data, target, csv_file):
         """Write the data collected from the .state into a CSV file.
 
         Args:
             data (dict): dictionary containing the info from the .state file.
+            target (string): specifies which CSV must be generated
             csv_file (string): path to the CSV file.
""" + + analysisd_events_header = [ + "Timestamp", + "API Timestamp", + "Interval (Timestamp-Uptime)", + "Events processed", + "Events received", + + "Decoded from azure", + "Decoded from ciscat", + "Decoded from command", + "Decoded from docker", + "Decoded from logcollector eventchannel", + "Decoded from logcollector eventlog", + "Decoded from logcollector macos", + "Decoded from logcollector others", + "Decoded from osquery", + "Decoded from rootcheck", + "Decoded from sca", + "Decoded from syscheck", + "Decoded from syscollector", + "Decoded from vulnerability", + "Decoded from agentd", + "Decoded from dbsync", + "Decoded from monitor", + "Decoded from remote", + + "Dropped from azure", + "Dropped from ciscat", + "Dropped from command", + "Dropped from docker", + "Dropped from logcollector eventchannel", + "Dropped from logcollector eventlog", + "Dropped from logcollector macos", + "Dropped from logcollector others", + "Dropped from osquery", + "Dropped from rootcheck", + "Dropped from sca", + "Dropped from syscheck", + "Dropped from syscollector", + "Dropped from vulnerability", + "Dropped from agentd", + "Dropped from dbsync", + "Dropped from monitor", + "Dropped from remote", + + "Written alerts", + "Written archives", + "Written firewall", + "Written fts", + "Written stats", + + "EDPS from azure", + "EDPS from ciscat", + "EDPS from command", + "EDPS from docker", + "EDPS from logcollector eventchannel", + "EDPS from logcollector eventlog", + "EDPS from logcollector macos", + "EDPS from logcollector others", + "EDPS from osquery", + "EDPS from rootcheck", + "EDPS from sca", + "EDPS from syscheck", + "EDPS from syscollector", + "EDPS from vulnerability", + "EDPS from agentd", + "EDPS from dbsync", + "EDPS from monitor", + "EDPS from remote"] + + analysisd_header = [ + "Timestamp", + "Total Events", + "Syscheck Events Decoded", + "Syscollector Events Decoded", + "Rootcheck Events Decoded", + "SCA Events Decoded", + "WinEvt Events Decoded", + "DBSync Messages dispatched", + "Other Events Decoded", + "Events processed (Rule matching)", + "Events received", + "Events dropped", + "Alerts written", + "Firewall alerts written", + "FTS alerts written", + "Syscheck queue usage", + "Syscheck queue size", + "Syscollector queue usage", + "Syscollector queue size", + "Rootcheck queue usage", + "Rootcheck queue size", + "SCA queue usage", + "SCA queue size", + "Hostinfo queue usage", + "Hostinfo queue size", + "Winevt queue usage", + "Winevt queue size", + "DBSync queue usage", + "DBSync queue size", + "Upgrade queue usage", + "Upgrade queue size", + "Event queue usage", + "Event queue size", + "Rule matching queue usage", + "Rule matching queue size", + "Alerts log queue usage", + "Alerts log queue size", + "Firewall log queue usage", + "Firewall log queue size", + "Statistical log queue usage", + "Statistical log queue size", + "Archives log queue usage", + "Archives log queue size"] + + logcollector_header = [ + "Timestamp", + "Location", + "Events", + "Bytes", + "Target", + "Target Drops"] + + remoted_header = [ + "Timestamp", + "Queue size", + "Total Queue size", + "TCP sessions", + "Events count", + "Control messages", + "Discarded messages", + "Bytes received"] + + remoted_api_header = [ + "Timestamp", + "API Timestamp", + "Interval (Timestamp-Uptime)", + "Queue size", + "Queue usage", + "TCP sessions", + "Keys reload count", + + "Control messages", + "Control keepalives", + "Control requests", + "Control shutdown", + "Control startup", + + "Dequeued messages", + "Discarded messages", + 
"Events count", + "Ping messages", + "Unknown messages", + + "Sent ack", + "Sent ar", + "Sent discarded", + "Sent request", + "Sent sca", + "Sent shared", + + "Metrics-Bytes received", + "Metrics-Bytes sent"] + + agentd_header = ["Timestamp", "Status", "Last Keepalive", "Last ACK", "Number of generated events", + "Number of messages", "Number of events buffered"] + + + if target == "analysis_state": + csv_header = analysisd_header + elif target == "analysis_events": + csv_header = analysisd_events_header + elif target == "logcollector": + csv_header = logcollector_header + elif target == "remoted": + csv_header = remoted_api_header if self.use_api == True else remoted_header + else: + csv_header = agentd_header + header = not isfile(csv_file) - with open(csv_file, 'a', newline='') as f: - csv_writer = csv.writer(f) + + with open(csv_file, 'a+') as log: + if header: - csv_writer.writerow(list(data)) + log.write(f'{",".join(csv_header)}\n') + + timestamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S') + + if self.use_api: + + ## Get data from API response + if target == "analysis_events": + data = data[0] + elif target == "remoted": + data = data[1] + + format = r"%Y-%m-%dT%H:%M:%S+%f:00" + datetime_timestamp = datetime.strptime(data['timestamp'], format) + datetime_uptime = datetime.strptime(data['uptime'], format) + interval = (datetime_timestamp - datetime_uptime).total_seconds() + + if target == "analysis_events": + metrics = data['metrics'] + decoded = metrics['events']['received_breakdown']['decoded_breakdown'] + decoded_modules = decoded['modules_breakdown'] + dropped = metrics['events']['received_breakdown']['dropped_breakdown'] + dropped_modules = dropped['modules_breakdown'] + written = metrics['events']['written_breakdown'] + + logger.info("Writing Analysisd events info to {}.".format(csv_file)) + log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},"+ + "{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35},{36},{37},{38},{39},{40},{41},"+ + "{42},{43},{44},{45},{46},{47},{48},{49},{50},{51},{52},{53},{54},{55},{56},{57},{58},{59},{60},{61},{62},"+ + "{63}\n").format( + timestamp, # 0 + data['timestamp'], # 1 + interval, # 2 + metrics['events']['processed'], # 3 + metrics['events']['received'], # 4 + decoded_modules['azure'], # 5 + decoded_modules['ciscat'], # 6 + decoded_modules['command'], # 7 + decoded_modules['docker'], # 8 + decoded_modules['logcollector_breakdown']['eventchannel'], # 9 + decoded_modules['logcollector_breakdown']['eventlog'], # 10 + decoded_modules['logcollector_breakdown']['macos'], # 11 + decoded_modules['logcollector_breakdown']['others'], # 12 + decoded_modules['osquery'], # 13 + decoded_modules['rootcheck'], # 14 + decoded_modules['sca'], # 15 + decoded_modules['syscheck'], # 16 + decoded_modules['syscollector'], # 17 + decoded_modules['vulnerability'], # 18 + decoded['agent'], # 19 + decoded['dbsync'], # 20 + decoded['monitor'], # 21 + decoded['remote'], # 22 + + dropped_modules['azure'], # 23 + dropped_modules['ciscat'], # 24 + dropped_modules['command'], # 25 + dropped_modules['docker'], # 26 + dropped_modules['logcollector_breakdown']['eventchannel'], # 27 + dropped_modules['logcollector_breakdown']['eventlog'], # 28 + dropped_modules['logcollector_breakdown']['macos'], # 29 + dropped_modules['logcollector_breakdown']['others'], # 30 + dropped_modules['osquery'], # 31 + dropped_modules['rootcheck'], # 32 + dropped_modules['sca'], # 33 + 
dropped_modules['syscheck'], # 34 + dropped_modules['syscollector'], # 35 + dropped_modules['vulnerability'], # 36 + dropped['agent'], # 37 + dropped['dbsync'], # 38 + dropped['monitor'], # 39 + dropped['remote'], # 40 + + written['alerts'], # 41 + written['archives'], # 42 + written['firewall'], # 43 + written['fts'], # 44 + written['stats'], # 45 + + decoded_modules['azure'] / interval, # 46 + decoded_modules['ciscat'] / interval, # 47 + decoded_modules['command'] / interval, # 48 + decoded_modules['docker'] / interval, # 49 + decoded_modules['logcollector_breakdown']['eventchannel'] / interval, # 50 + decoded_modules['logcollector_breakdown']['eventlog'] / interval, # 51 + decoded_modules['logcollector_breakdown']['macos'] / interval, # 52 + decoded_modules['logcollector_breakdown']['others'] / interval, # 53 + decoded_modules['osquery'] / interval, # 54 + decoded_modules['rootcheck'] / interval, # 55 + decoded_modules['sca'] / interval, # 56 + decoded_modules['syscheck'] / interval, # 57 + decoded_modules['syscollector'] / interval, # 58 + decoded_modules['vulnerability'] / interval, # 59 + decoded['agent'] / interval, # 60 + decoded['dbsync'] / interval, # 61 + decoded['monitor'] / interval, # 62 + decoded['remote'] / interval, # 63 + )) + elif target == "remoted": + metrics = data['metrics'] + received_messages = metrics['messages']['received_breakdown'] + sent_messages = metrics['messages']['sent_breakdown'] + logger.info("Writing remoted data from API info to {}.".format(csv_file)) + log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},"+ + "{19},{20},{21},{22},{23},{24}\n").format( + timestamp, # 0 + data['timestamp'], # 1 + interval, # 2 + metrics['queues']['received']['size'], # 3 + metrics['queues']['received']['usage'], # 4 + metrics['tcp_sessions'], # 5 + metrics['keys_reload_count'], # 6 + received_messages['control'], # 7 + received_messages['control_breakdown']['keepalive'], # 8 + received_messages['control_breakdown']['request'], # 9 + received_messages['control_breakdown']['shutdown'], # 10 + received_messages['control_breakdown']['startup'], # 11 + received_messages['dequeued_after'], # 12 + received_messages['discarded'], # 13 + received_messages['event'], # 14 + received_messages['ping'], # 15 + received_messages['unknown'], # 16 + sent_messages['ack'], # 17 + sent_messages['ar'], # 18 + sent_messages['discarded'], # 19 + sent_messages['request'], # 20 + sent_messages['sca'], # 21 + sent_messages['shared'], # 22 + metrics['bytes']['received'], # 23 + metrics['bytes']['sent'] # 24 + )) + else: + if target == "analysis_state": + logger.info("Writing analysisd.state info to {}.".format(csv_file)) + log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},"+ + "{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35},{36},{37},{38},{39},{40},{41},{42}\n") + .format( + timestamp, # 0 + data['total_events_decoded'], # 1 + data['syscheck_events_decoded'], # 2 + data['syscollector_events_decoded'], # 3 + data['rootcheck_events_decoded'], # 4 + data['sca_events_decoded'], # 5 + data['winevt_events_decoded'], # 6 + data['dbsync_messages_dispatched'], # 7 + data['other_events_decoded'], # 8 + data['events_processed'], # 9 + data['events_received'], # 10 + data['events_dropped'], # 11 + data['alerts_written'], # 12 + data['firewall_written'], # 13 + data['fts_written'], # 14 + data['syscheck_queue_usage'], # 15 + data['syscheck_queue_size'], # 16 + 
data['syscollector_queue_usage'], # 17 + data['syscollector_queue_size'], # 18 + data['rootcheck_queue_usage'], # 19 + data['rootcheck_queue_size'], # 20 + data['sca_queue_usage'], # 21 + data['sca_queue_size'], # 22 + data['hostinfo_queue_usage'], # 23 + data['hostinfo_queue_size'], # 24 + data['winevt_queue_usage'], # 25 + data['winevt_queue_size'], # 26 + data['dbsync_queue_usage'], # 27 + data['dbsync_queue_size'], # 28 + data['upgrade_queue_usage'], # 29 + data['upgrade_queue_size'], # 30 + data['event_queue_usage'], # 31 + data['event_queue_size'], # 32 + data['rule_matching_queue_usage'], # 33 + data['rule_matching_queue_size'], # 34 + data['alerts_queue_usage'], # 35 + data['alerts_queue_size'], # 36 + data['firewall_queue_usage'], # 37 + data['firewall_queue_size'], # 38 + data['statistical_queue_usage'], # 39 + data['statistical_queue_size'], # 40 + data['archives_queue_usage'], # 41 + data['archives_queue_size'] # 42 + )) + elif target == "logcollector": + logger.info("Writing logcollector info to {}.".format(csv_file)) + log.write("{0},{1},{2},{3},{4},{5}\n".format( + timestamp, data['location'], data['events'], data['bytes'], data['target'], data['target_drops'])) + + elif target == "remoted": + logger.info("Writing remoted.state info to {}.".format(csv_file)) + log.write("{0},{1},{2},{3},{4},{5},{6},{7}\n".format( + timestamp, # 0 + data['queue_size'], # 1 + data['total_queue_size'], # 2 + data['tcp_sessions'], # 3 + data['evt_count'], # 4 + data['ctrl_msg_count'], # 5 + data['discarded_count'], # 6 + data['recv_bytes'] # 7 + )) + elif target == "agentd_state": + logger.info("Writing agentd.state info to {}.".format(csv_file)) + log.write("{0},{1},{2},{3},{4},{5},{6}\n".format( + timestamp, data['status'], data['last_keepalive'], + data['last_ack'], data['msg_count'], data['msg_sent'], data['msg_buffer'])) - csv_writer.writerow(list(data.values())) - logger.debug(f'Added new entry in {csv_file}') def _monitor_stats(self): """Read the .state files and log the data into a CSV file.""" while not self.event.is_set(): - self._parse_state_file() + if self.use_api: + self._parse_api_data() + else: + self._parse_state_file() sleep(self.time_step) + def run(self): """Run the event and thread monitoring functions.""" self.event = Event() self.thread = Thread(target=self._monitor_stats) self.thread.start() + def start(self): """Start the monitoring threads.""" self.run() - logger.info(f'Started monitoring statistics from {self.statistics_file}') + if self.use_api: + logger.info(f'Started monitoring statistics from API for {self.target}') + else: + logger.info(f'Started monitoring statistics from {self.statistics_file}') + def shutdown(self): """Stop all the monitoring threads.""" From cc403ecd568ae907c4c766ac06648e651d72a697 Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Fri, 16 Feb 2024 17:33:55 +0100 Subject: [PATCH 03/20] move statistic headers to file --- .../tools/performance/statistic.py | 178 +----------------- .../tools/performance/statistic_headers.py | 166 ++++++++++++++++ 2 files changed, 172 insertions(+), 172 deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py index da6bad3323..0447af9102 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py @@ -17,6 +17,7 @@ from 
tempfile import gettempdir from threading import Thread, Event from time import sleep, time +from wazuh_testing.tools.performance import statistic_headers as headers urllib3.disable_warnings() @@ -166,184 +167,17 @@ def _write_csv(self, data, target, csv_file): csv_file (string): path to the CSV file. """ - analysisd_events_header = [ - "Timestamp", - "API Timestamp", - "Interval (Timestamp-Uptime)", - "Events processed", - "Events received", - - "Decoded from azure", - "Decoded from ciscat", - "Decoded from command", - "Decoded from docker", - "Decoded from logcollector eventchannel", - "Decoded from logcollector eventlog", - "Decoded from logcollector macos", - "Decoded from logcollector others", - "Decoded from osquery", - "Decoded from rootcheck", - "Decoded from sca", - "Decoded from syscheck", - "Decoded from syscollector", - "Decoded from vulnerability", - "Decoded from agentd", - "Decoded from dbsync", - "Decoded from monitor", - "Decoded from remote", - - "Dropped from azure", - "Dropped from ciscat", - "Dropped from command", - "Dropped from docker", - "Dropped from logcollector eventchannel", - "Dropped from logcollector eventlog", - "Dropped from logcollector macos", - "Dropped from logcollector others", - "Dropped from osquery", - "Dropped from rootcheck", - "Dropped from sca", - "Dropped from syscheck", - "Dropped from syscollector", - "Dropped from vulnerability", - "Dropped from agentd", - "Dropped from dbsync", - "Dropped from monitor", - "Dropped from remote", - - "Written alerts", - "Written archives", - "Written firewall", - "Written fts", - "Written stats", - - "EDPS from azure", - "EDPS from ciscat", - "EDPS from command", - "EDPS from docker", - "EDPS from logcollector eventchannel", - "EDPS from logcollector eventlog", - "EDPS from logcollector macos", - "EDPS from logcollector others", - "EDPS from osquery", - "EDPS from rootcheck", - "EDPS from sca", - "EDPS from syscheck", - "EDPS from syscollector", - "EDPS from vulnerability", - "EDPS from agentd", - "EDPS from dbsync", - "EDPS from monitor", - "EDPS from remote"] - - analysisd_header = [ - "Timestamp", - "Total Events", - "Syscheck Events Decoded", - "Syscollector Events Decoded", - "Rootcheck Events Decoded", - "SCA Events Decoded", - "WinEvt Events Decoded", - "DBSync Messages dispatched", - "Other Events Decoded", - "Events processed (Rule matching)", - "Events received", - "Events dropped", - "Alerts written", - "Firewall alerts written", - "FTS alerts written", - "Syscheck queue usage", - "Syscheck queue size", - "Syscollector queue usage", - "Syscollector queue size", - "Rootcheck queue usage", - "Rootcheck queue size", - "SCA queue usage", - "SCA queue size", - "Hostinfo queue usage", - "Hostinfo queue size", - "Winevt queue usage", - "Winevt queue size", - "DBSync queue usage", - "DBSync queue size", - "Upgrade queue usage", - "Upgrade queue size", - "Event queue usage", - "Event queue size", - "Rule matching queue usage", - "Rule matching queue size", - "Alerts log queue usage", - "Alerts log queue size", - "Firewall log queue usage", - "Firewall log queue size", - "Statistical log queue usage", - "Statistical log queue size", - "Archives log queue usage", - "Archives log queue size"] - - logcollector_header = [ - "Timestamp", - "Location", - "Events", - "Bytes", - "Target", - "Target Drops"] - - remoted_header = [ - "Timestamp", - "Queue size", - "Total Queue size", - "TCP sessions", - "Events count", - "Control messages", - "Discarded messages", - "Bytes received"] - - remoted_api_header = [ - 
"Timestamp", - "API Timestamp", - "Interval (Timestamp-Uptime)", - "Queue size", - "Queue usage", - "TCP sessions", - "Keys reload count", - - "Control messages", - "Control keepalives", - "Control requests", - "Control shutdown", - "Control startup", - - "Dequeued messages", - "Discarded messages", - "Events count", - "Ping messages", - "Unknown messages", - - "Sent ack", - "Sent ar", - "Sent discarded", - "Sent request", - "Sent sca", - "Sent shared", - - "Metrics-Bytes received", - "Metrics-Bytes sent"] - - agentd_header = ["Timestamp", "Status", "Last Keepalive", "Last ACK", "Number of generated events", - "Number of messages", "Number of events buffered"] - if target == "analysis_state": - csv_header = analysisd_header + csv_header = headers.analysisd_header elif target == "analysis_events": - csv_header = analysisd_events_header + csv_header = headers.analysisd_events_header elif target == "logcollector": - csv_header = logcollector_header + csv_header = headers.logcollector_header elif target == "remoted": - csv_header = remoted_api_header if self.use_api == True else remoted_header + csv_header = headers.remoted_api_header if self.use_api == True else headers.remoted_header else: - csv_header = agentd_header + csv_header = headers.agentd_header header = not isfile(csv_file) diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py new file mode 100644 index 0000000000..2409a6b7f4 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py @@ -0,0 +1,166 @@ +analysisd_events_header = ["Timestamp", + "API Timestamp", + "Interval (Timestamp-Uptime)", + "Events processed", + "Events received", + + "Decoded from azure", + "Decoded from ciscat", + "Decoded from command", + "Decoded from docker", + "Decoded from logcollector eventchannel", + "Decoded from logcollector eventlog", + "Decoded from logcollector macos", + "Decoded from logcollector others", + "Decoded from osquery", + "Decoded from rootcheck", + "Decoded from sca", + "Decoded from syscheck", + "Decoded from syscollector", + "Decoded from vulnerability", + "Decoded from agentd", + "Decoded from dbsync", + "Decoded from monitor", + "Decoded from remote", + + "Dropped from azure", + "Dropped from ciscat", + "Dropped from command", + "Dropped from docker", + "Dropped from logcollector eventchannel", + "Dropped from logcollector eventlog", + "Dropped from logcollector macos", + "Dropped from logcollector others", + "Dropped from osquery", + "Dropped from rootcheck", + "Dropped from sca", + "Dropped from syscheck", + "Dropped from syscollector", + "Dropped from vulnerability", + "Dropped from agentd", + "Dropped from dbsync", + "Dropped from monitor", + "Dropped from remote", + + "Written alerts", + "Written archives", + "Written firewall", + "Written fts", + "Written stats", + + "EDPS from azure", + "EDPS from ciscat", + "EDPS from command", + "EDPS from docker", + "EDPS from logcollector eventchannel", + "EDPS from logcollector eventlog", + "EDPS from logcollector macos", + "EDPS from logcollector others", + "EDPS from osquery", + "EDPS from rootcheck", + "EDPS from sca", + "EDPS from syscheck", + "EDPS from syscollector", + "EDPS from vulnerability", + "EDPS from agentd", + "EDPS from dbsync", + "EDPS from monitor", + "EDPS from remote" + ] +analysisd_header = ["Timestamp", + "Total Events", + "Syscheck Events Decoded", + "Syscollector Events Decoded", + "Rootcheck Events Decoded", + "SCA Events 
Decoded", + "WinEvt Events Decoded", + "DBSync Messages dispatched", + "Other Events Decoded", + "Events processed (Rule matching)", + "Events received", + "Events dropped", + "Alerts written", + "Firewall alerts written", + "FTS alerts written", + "Syscheck queue usage", + "Syscheck queue size", + "Syscollector queue usage", + "Syscollector queue size", + "Rootcheck queue usage", + "Rootcheck queue size", + "SCA queue usage", + "SCA queue size", + "Hostinfo queue usage", + "Hostinfo queue size", + "Winevt queue usage", + "Winevt queue size", + "DBSync queue usage", + "DBSync queue size", + "Upgrade queue usage", + "Upgrade queue size", + "Event queue usage", + "Event queue size", + "Rule matching queue usage", + "Rule matching queue size", + "Alerts log queue usage", + "Alerts log queue size", + "Firewall log queue usage", + "Firewall log queue size", + "Statistical log queue usage", + "Statistical log queue size", + "Archives log queue usage", + "Archives log queue size" + ] +logcollector_header = ["Timestamp", + "Location", + "Events", + "Bytes", + "Target", + "Target Drops" + ] +remoted_header = ["Timestamp", + "Queue size", + "Total Queue size", + "TCP sessions", + "Events count", + "Control messages", + "Discarded messages", + "Bytes received"] + +remoted_api_header = ["Timestamp", + "API Timestamp", + "Interval (Timestamp-Uptime)", + "Queue size", + "Queue usage", + "TCP sessions", + "Keys reload count", + + "Control messages", + "Control keepalives", + "Control requests", + "Control shutdown", + "Control startup", + + "Dequeued messages", + "Discarded messages", + "Events count", + "Ping messages", + "Unknown messages", + + "Sent ack", + "Sent ar", + "Sent discarded", + "Sent request", + "Sent sca", + "Sent shared", + + "Metrics-Bytes received", + "Metrics-Bytes sent"] + +agentd_header = ["Timestamp", + "Status", + "Last Keepalive", + "Last ACK", + "Number of generated events", + "Number of messages", + "Number of events buffered"] From 3712d7c39420d68ff1c553825908102e4d567412 Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Mon, 19 Feb 2024 10:40:57 +0100 Subject: [PATCH 04/20] add wazuhdb header --- .../tools/performance/statistic_headers.py | 173 ++++++++++++++++++ 1 file changed, 173 insertions(+) diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py index 2409a6b7f4..9d1dc24fff 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py @@ -164,3 +164,176 @@ "Number of generated events", "Number of messages", "Number of events buffered"] + + +wazuhdb_header = ["Timestamp", + "API Timestamp", + "Interval (Timestamp-Uptime)", + ## QUERIES COUNTS + "Received queries", + "Agent queries", + ## Agent QueriesBreakdown + "db-begin", + "db-close", + "db-commit", + "db-remove", + "db-sql", + "db-vacuum", + "db-get_fragmentation", + ## Agent Tables Breakdown + "Table CisCat", + "Table Rootcheck", + "Table SCA", + "Table dbsync", + "Table Syscheck", + "Table Syscheck file", + "Table Syscheck registry", + "Table Syscheck registry_key", + "Table Syscheck registry_value", + "Table Syscollector hotfixes", + "Table Syscollector hwinfo", + "Table Syscollector network_address", + "Table Syscollector network_iface", + "Table Syscollector network_protocol", + "Table Syscollector packages", + "Table Syscollector ports", + "Table Syscollector processes", + "Table 
Vulnerability CVEs", + + "Global queries", + ## Global Queries Breakdown + "db-backup", + "db-sql", + "db-vacuum", + "db-get_fragmentation", + + "agent-delete-agent", + "agent-disconnect-agents", + "agent-find-agent", + "agent-get-agent-info", + "agent-get-agents-by-connection-status", + "agent-get-all-agents", + "agent-get-distinct-groups", + "agent-get-groups-integrity", + "agent-insert-agent", + "agent-reset-agents-connection", + "agent-select-agent-group", + "agent-select-agent-name", + "agent-set-agent-groups", + "agent-sync-agent-groups-get", + "agent-sync-agent-info-get", + "agent-sync-agent-info-set", + "agent-update-agent-data", + "agent-update-agent-name", + "agent-update-connection-status", + "agent-update-status-code", + "agent-update-keepalive", + + "belongs-get-group-agents", + "belongs-select-group-belong", + + "group-delete-group", + "group-find-group", + "group-insert-agent-group", + "group-select-groups", + "labels-get-labels", + + "MITRE", + ## Mitre breakdown + "Tasks", + ## tasks breakdown + "tasks-delete old task", + "tasks-set timeout", + "tasks-upgrade", + "tasks-upgrade cancel", + "tasks-upgrade get status", + "tasks-upgrade results", + "tasks-upgrade update status", + "Wazuhdb", + + ## QUERIES TIME METRICS - IN MILISECONDS + "Total Execution Time", + "Agent ExecTime", + ## Agent Time Breakdown + "db-open", + "db-begin", + "db-close", + "db-commit", + "db-remove", + "db-sql", + "db-vacuum", + "db-get_fragmentation", + ## Agent Tables Breakdown + "Table CisCat", + "Table Rootcheck", + "Table SCA", + "Table dbsync", + "Table Syscheck", + "Table Syscheck file", + "Table Syscheck registry", + "Table Syscheck registry_key", + "Table Syscheck registry_value", + "Table Syscollector hotfixes", + "Table Syscollector hwinfo", + "Table Syscollector network_address", + "Table Syscollector network_iface", + "Table Syscollector network_protocol", + "Table Syscollector packages", + "Table Syscollector ports", + "Table Syscollector processes", + "Table Vulnerability CVEs", + + ## Global Queries Time Breakdown + "Global Queries ExecTime", + "db-open", + "db-backup", + "db-sql", + "db-vacuum", + "db-get_fragmentation", + ## Global Tables breakdown + "agent-delete-agent", + "agent-disconnect-agents", + "agent-find-agent", + "agent-get-agent-info", + "agent-get-agents-by-connection-status", + "agent-get-all-agents", + "agent-get-distinct-groups", + "agent-get-groups-integrity", + "agent-insert-agent", + "agent-reset-agents-connection", + "agent-select-agent-group", + "agent-select-agent-name", + "agent-set-agent-groups", + "agent-sync-agent-groups-get", + "agent-sync-agent-info-get", + "agent-sync-agent-info-set", + "agent-update-agent-data", + "agent-update-agent-name", + "agent-update-connection-status", + "agent-update-status-code", + "agent-update-keepalive", + + "belongs-get-group-agents", + "belongs-select-group-belong", + + "group-delete-group", + "group-find-group", + "group-insert-agent-group", + "group-select-groups", + + "labels-get-labels", + + "MITRE", + ## Mitre breakdown + "Tasks", + ## tasks breakdown + "tasks-delete old task", + "tasks-set timeout", + "tasks-upgrade", + "tasks-upgrade cancel", + "tasks-upgrade get status", + "tasks-upgrade results", + "tasks-upgrade update status", + + "Wazuhdb" + ] From a1d40082079eb461306884f8d66599c56bb304a1 Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Mon, 19 Feb 2024 12:55:04 +0100 Subject: [PATCH 05/20] add missing headers --- .../tools/performance/statistic_headers.py | 16 +++++++--------- 1 file 
changed, 7 insertions(+), 9 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py index 9d1dc24fff..a7b15c0423 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py @@ -169,10 +169,10 @@ wazuhdb_header = ["Timestamp", "API Timestamp", "Interval (Timestamp-Uptime)", - ## QUERIES COUNTS + ## QUERIES COUNTS "Received queries", "Agent queries", - ## Agent QueriesBreakdown + ## Agent QueriesBreakdown "db-begin", "db-close", "db-commit", @@ -180,7 +180,7 @@ "db-sql", "db-vacuum", "db-get_fragmentation", - ## Agent Tables Breakdown + ## Agent Tables Breakdown "Table CisCat", "Table Rootcheck", "Table SCA", @@ -195,6 +195,7 @@ "Table Syscollector network_address", "Table Syscollector network_iface", "Table Syscollector network_protocol", + "Table Syscollector os_info", "Table Syscollector packages", "Table Syscollector ports", "Table Syscollector processes", @@ -239,13 +240,13 @@ "labels-get-labels", "MITRE", - ## Mitre breakdown "Tasks", ## tasks breakdown "tasks-delete old task", "tasks-set timeout", "tasks-upgrade", "tasks-upgrade cancel", + "tasks-upgrade custom", "tasks-upgrade get status", "tasks-upgrade results", "tasks-upgrade update status", @@ -278,6 +279,7 @@ "Table Syscollector network_address", "Table Syscollector network_iface", "Table Syscollector network_protocol", + "Table Syscollector os_info", "Table Syscollector packages", "Table Syscollector ports", "Table Syscollector processes", @@ -312,28 +314,24 @@ "agent-update-connection-status", "agent-update-status-code", "agent-update-keepalive", - "belongs-get-group-agents", "belongs-select-group-belong", - "group-delete-group", "group-find-group", "group-insert-agent-group", "group-select-groups", - "labels-get-labels", "MITRE", - ## Mitre breakdown "Tasks", ## tasks breakdown "tasks-delete old task", "tasks-set timeout", "tasks-upgrade", "tasks-upgrade cancel", + "tasks-upgrade custom", "tasks-upgrade get status", "tasks-upgrade results", "tasks-upgrade update status", - "Wazuhdb" ] From 3486a9f06a0950993e9fa98c1b5007967902869c Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Mon, 19 Feb 2024 15:45:37 +0100 Subject: [PATCH 06/20] add wazuhdb parsing --- .../tools/performance/statistic.py | 177 +++++++++++++++++- 1 file changed, 175 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py index 0447af9102..855420719b 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py @@ -63,7 +63,7 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_api=Fa self.use_api = use_api self.parse_json = False - if self.use_api == True and self.target != 'analysis_events' and self.target != 'remoted': + if self.use_api == True and self.target != 'analysis_events' and self.target != 'remoted' and self.target != 'wazuhdb': self.use_api = False if self.target == 'agent': @@ -75,7 +75,7 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_api=Fa self.statistics_file = tls.REMOTE_STATISTICS_FILE elif self.target == 'analysis_state': self.statistics_file = tls.ANALYSIS_STATISTICS_FILE - elif self.target == 'analysis_events': + elif self.target == 'analysis_events' 
or self.target == 'wazuhdb':
             self.use_api = True
         else:
             raise ValueError(f'The target {self.target} is not a valid one.')
@@ -176,6 +176,8 @@ def _write_csv(self, data, target, csv_file):
             csv_header = headers.logcollector_header
         elif target == "remoted":
             csv_header = headers.remoted_api_header if self.use_api == True else headers.remoted_header
+        elif target == "wazuhdb":
+            csv_header = headers.wazuhdb_header
         else:
             csv_header = headers.agentd_header
 
@@ -195,6 +197,8 @@ def _write_csv(self, data, target, csv_file):
                     data = data[0]
                 elif target == "remoted":
                     data = data[1]
+                elif target == "wazuhdb":
+                    data = data[2]
 
                 format = r"%Y-%m-%dT%H:%M:%S+%f:00"
                 datetime_timestamp = datetime.strptime(data['timestamp'], format)
@@ -315,6 +319,175 @@ def _write_csv(self, data, target, csv_file):
                             metrics['bytes']['received'], # 23
                             metrics['bytes']['sent'] # 24
                         ))
+                else:
+                    received_breakdown = data['metrics']['queries']['received_breakdown']
+                    ag_bd = received_breakdown['agent_breakdown']
+                    glob_bd = received_breakdown['global_breakdown']
+                    execution_breakdown = data['metrics']['time']['execution_breakdown']
+                    exec_ag_bd = execution_breakdown['agent_breakdown']
+                    exec_glob_bd = execution_breakdown['global_breakdown']
+                    logger.info("Writing wazuh-db data from API info to {}.".format(csv_file))
+                    log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},"+
+                               "{19},{20},{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35},"+
+                               "{36},{37},{38},{39},{40},{41},{42},{43},{44},{45},{46},{47},{48},{49},{50},{51},{52},"+
+                               "{53},{54},{55},{56},{57},{58},{59},{60},{61},{62},{63},{64},{65},{66},{67},{68},{69},"+
+                               "{70},{71},{72},{73},{74},{75},{76},{77},{78},{79},{80},{81},{82},{83},{84},{85},{86},"+
+                               "{87},{88},{89},{90},{91},{92},{93},{94},{95},{96},{97},{98},{99},{100},{101},{102},"+
+                               "{103},{104},{105},{106},{107},{108},{109},{110},{111},{112},{113},{114},{115},{116},"+
+                               "{117},{118},{119},{120},{121},{122},{123},{124},{125},{126},{127},{128},{129},{130},"+
+                               "{131},{132},{133},{134},{135},{136},{137},{138},{139},{140},{141},{142},{143},{144},"+
+                               "{145},{146},{147},{148}\n").format(
+                        timestamp, # 0
+                        data['timestamp'], # 1
+                        interval, # 2
+                        data['metrics']['queries']['received'], # 3
+                        received_breakdown['agent'], # 4
+                        ag_bd['db']['begin'], # 5
+                        ag_bd['db']['close'], # 6
+                        ag_bd['db']['commit'], # 7
+                        ag_bd['db']['remove'], # 8
+                        ag_bd['db']['sql'], # 9
+                        ag_bd['db']['vacuum'], # 10
+                        ag_bd['db']['get_fragmentation'], # 11
+                        ag_bd['tables']['ciscat']['ciscat'], # 12
+                        ag_bd['tables']['rootcheck']['rootcheck'], # 13
+                        ag_bd['tables']['sca']['sca'], # 14
+                        ag_bd['tables']['sync']['dbsync'], # 15
+                        ag_bd['tables']['syscheck']['syscheck'], # 16
+                        ag_bd['tables']['syscheck']['fim_file'], # 17
+                        ag_bd['tables']['syscheck']['fim_registry'], # 18
+                        ag_bd['tables']['syscheck']['fim_registry_key'], # 19
+                        ag_bd['tables']['syscheck']['fim_registry_value'], # 20
+                        ag_bd['tables']['syscollector']['syscollector_hotfixes'], # 21
+                        ag_bd['tables']['syscollector']['syscollector_hwinfo'], # 22
+                        ag_bd['tables']['syscollector']['syscollector_network_address'], # 23
+                        ag_bd['tables']['syscollector']['syscollector_network_iface'], # 24
+                        ag_bd['tables']['syscollector']['syscollector_network_protocol'], # 25
+                        ag_bd['tables']['syscollector']['syscollector_osinfo'], # 26
+                        ag_bd['tables']['syscollector']['syscollector_packages'], # 27
+                        ag_bd['tables']['syscollector']['syscollector_ports'], # 28
+
ag_bd['tables']['syscollector']['syscollector_processes'], # 29 + ag_bd['tables']['vulnerability']['vuln_cves'], # 30 + received_breakdown['global'], # 31 + glob_bd['db']['backup'], # 32 + glob_bd['db']['sql'], # 33 + glob_bd['db']['vacuum'], # 34 + glob_bd['db']['get_fragmentation'], # 35 + glob_bd['tables']['agent']['delete-agent'], # 36 + glob_bd['tables']['agent']['disconnect-agents'], # 37 + glob_bd['tables']['agent']['find-agent'], # 38 + glob_bd['tables']['agent']['get-agent-info'], # 39 + glob_bd['tables']['agent']['get-agents-by-connection-status'], # 40 + glob_bd['tables']['agent']['get-all-agents'], # 41 + glob_bd['tables']['agent']['get-distinct-groups'], # 42 + glob_bd['tables']['agent']['get-groups-integrity'], # 43 + glob_bd['tables']['agent']['insert-agent'], # 44 + glob_bd['tables']['agent']['reset-agents-connection'], # 45 + glob_bd['tables']['agent']['select-agent-group'], # 46 + glob_bd['tables']['agent']['select-agent-name'], # 47 + glob_bd['tables']['agent']['set-agent-groups'], # 48 + glob_bd['tables']['agent']['sync-agent-groups-get'], # 49 + glob_bd['tables']['agent']['sync-agent-info-get'], # 50 + glob_bd['tables']['agent']['sync-agent-info-set'], # 51 + glob_bd['tables']['agent']['update-agent-data'], # 52 + glob_bd['tables']['agent']['update-agent-name'], # 53 + glob_bd['tables']['agent']['update-connection-status'], # 54 + glob_bd['tables']['agent']['update-status-code'], # 55 + glob_bd['tables']['agent']['update-keepalive'], # 56 + glob_bd['tables']['belongs']['get-group-agents'], # 57 + glob_bd['tables']['belongs']['select-group-belong'], # 58 + glob_bd['tables']['group']['delete-group'], # 59 + glob_bd['tables']['group']['find-group'], # 60 + glob_bd['tables']['group']['insert-agent-group'], # 61 + glob_bd['tables']['group']['select-groups'], # 62 + glob_bd['tables']['labels']['get-labels'], # 63 + received_breakdown['mitre'], # 64 + received_breakdown['task'], # 65 + received_breakdown['task_breakdown']['tables']['tasks']['delete_old'], # 66 + received_breakdown['task_breakdown']['tables']['tasks']['set_timeout'], # 67 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade'], # 68 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade_cancel_tasks'], # 69 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade_custom'], # 70 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade_get_status'], # 71 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade_result'], # 72 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade_update_status'], # 73 + received_breakdown['wazuhdb'], # 74 + + data['metrics']['time']['execution'], # 75 + execution_breakdown['agent'], # 76 + exec_ag_bd['db']['open'], # 77 + exec_ag_bd['db']['begin'], # 78 + exec_ag_bd['db']['close'], # 79 + exec_ag_bd['db']['commit'], # 80 + exec_ag_bd['db']['remove'], # 81 + exec_ag_bd['db']['sql'], # 82 + exec_ag_bd['db']['vacuum'], # 83 + exec_ag_bd['db']['get_fragmentation'], # 84 + exec_ag_bd['tables']['ciscat']['ciscat'], # 85 + exec_ag_bd['tables']['rootcheck']['rootcheck'], # 86 + exec_ag_bd['tables']['sca']['sca'], # 87 + exec_ag_bd['tables']['sync']['dbsync'], # 88 + exec_ag_bd['tables']['syscheck']['syscheck'], # 89 + exec_ag_bd['tables']['syscheck']['fim_file'], # 90 + exec_ag_bd['tables']['syscheck']['fim_registry'], # 91 + exec_ag_bd['tables']['syscheck']['fim_registry_key'], # 92 + exec_ag_bd['tables']['syscheck']['fim_registry_value'], # 93 + exec_ag_bd['tables']['syscollector']['syscollector_hotfixes'], # 94 + 
exec_ag_bd['tables']['syscollector']['syscollector_hwinfo'], # 95 + exec_ag_bd['tables']['syscollector']['syscollector_network_address'], # 96 + exec_ag_bd['tables']['syscollector']['syscollector_network_iface'], # 97 + exec_ag_bd['tables']['syscollector']['syscollector_network_protocol'], # 98 + exec_ag_bd['tables']['syscollector']['syscollector_osinfo'], # 99 + exec_ag_bd['tables']['syscollector']['syscollector_packages'], # 100 + exec_ag_bd['tables']['syscollector']['syscollector_ports'], # 101 + exec_ag_bd['tables']['syscollector']['syscollector_processes'], # 102 + exec_ag_bd['tables']['vulnerability']['vuln_cves'], # 103 + data['metrics']['time']['execution'], # 104 + exec_glob_bd['db']['open'], # 105 + exec_glob_bd['db']['backup'], # 106 + exec_glob_bd['db']['sql'], # 107 + exec_glob_bd['db']['vacuum'], # 108 + exec_glob_bd['db']['get_fragmentation'], # 109 + exec_glob_bd['tables']['agent']['delete-agent'], # 110 + exec_glob_bd['tables']['agent']['disconnect-agents'], # 111 + exec_glob_bd['tables']['agent']['find-agent'], # 112 + exec_glob_bd['tables']['agent']['get-agent-info'], # 113 + exec_glob_bd['tables']['agent']['get-agents-by-connection-status'], # 114 + exec_glob_bd['tables']['agent']['get-all-agents'], # 115 + exec_glob_bd['tables']['agent']['get-distinct-groups'], # 116 + exec_glob_bd['tables']['agent']['get-groups-integrity'], # 117 + exec_glob_bd['tables']['agent']['insert-agent'], # 118 + exec_glob_bd['tables']['agent']['reset-agents-connection'], # 119 + exec_glob_bd['tables']['agent']['select-agent-group'], # 120 + exec_glob_bd['tables']['agent']['select-agent-name'], # 121 + exec_glob_bd['tables']['agent']['set-agent-groups'], # 122 + exec_glob_bd['tables']['agent']['sync-agent-groups-get'], # 123 + exec_glob_bd['tables']['agent']['sync-agent-info-get'], # 124 + exec_glob_bd['tables']['agent']['sync-agent-info-set'], # 125 + exec_glob_bd['tables']['agent']['update-agent-data'], # 126 + exec_glob_bd['tables']['agent']['update-agent-name'], # 127 + exec_glob_bd['tables']['agent']['update-connection-status'], # 128 + exec_glob_bd['tables']['agent']['update-status-code'], # 129 + exec_glob_bd['tables']['agent']['update-keepalive'], + exec_glob_bd['tables']['belongs']['get-group-agents'], + exec_glob_bd['tables']['belongs']['select-group-belong'], + exec_glob_bd['tables']['group']['delete-group'], + exec_glob_bd['tables']['group']['find-group'], + exec_glob_bd['tables']['group']['insert-agent-group'], + exec_glob_bd['tables']['group']['select-groups'], + exec_glob_bd['tables']['labels']['get-labels'], + execution_breakdown['mitre'], + execution_breakdown['task'], + execution_breakdown['task_breakdown']['tables']['tasks']['delete_old'], + execution_breakdown['task_breakdown']['tables']['tasks']['set_timeout'], + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade'], + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_cancel_tasks'], + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_custom'], + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_get_status'], + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_result'], + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_update_status'], + execution_breakdown['wazuhdb'] + )) else: if target == "analysis_state": logger.info("Writing analysisd.state info to {}.".format(csv_file)) From 2ac54a60498b35b34da527d8a8a082362f186dad Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Tue, 20 Feb 2024 11:03:59 +0100 Subject: 
[PATCH 07/20] change remoted parameter to remote --- .../wazuh_testing/scripts/wazuh_statistics.py | 2 +- .../wazuh_testing/tools/performance/statistic.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py index 034389bf9b..52331e22ff 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py @@ -26,7 +26,7 @@ def get_script_arguments(): formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('-t', '--target', dest='target_list', required=True, type=str, nargs='+', action='store', help='Type the statistics target to collect separated by whitespace. ' - 'Targets: agent, logcollector, remoted, analysis-events and analysisd-state.') + 'Targets: agent, logcollector, remoted, analysis_events and analysisd_state.') parser.add_argument('-s', '--sleep', dest='sleep_time', type=float, default=5, action='store', help='Type the time in seconds between each entry.') parser.add_argument('-d', '--debug', dest='debug', action='store_true', default=False, diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py index 855420719b..fb3f316104 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py @@ -63,7 +63,7 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_api=Fa self.use_api = use_api self.parse_json = False - if self.use_api == True and self.target != 'analysis_events' and self.target != 'remoted' and self.target != 'wazuhdb': + if self.use_api == True and self.target != 'analysis_events' and self.target != 'remote' and self.target != 'wazuhdb': self.use_api = False if self.target == 'agent': @@ -71,7 +71,7 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_api=Fa elif self.target == 'logcollector': self.statistics_file = tls.LOGCOLLECTOR_STATISTICS_FILE self.parse_json = True - elif self.target == 'remoted': + elif self.target == 'remote': self.statistics_file = tls.REMOTE_STATISTICS_FILE elif self.target == 'analysis_state': self.statistics_file = tls.ANALYSIS_STATISTICS_FILE @@ -174,7 +174,7 @@ def _write_csv(self, data, target, csv_file): csv_header = headers.analysisd_events_header elif target == "logcollector": csv_header = headers.logcollector_header - elif target == "remoted": + elif target == "remote": csv_header = headers.remoted_api_header if self.use_api == True else headers.remoted_header elif target == "wazuhdb": csv_header = headers.wazuhdb_header @@ -195,7 +195,7 @@ def _write_csv(self, data, target, csv_file): ## Get data from API response if target == "analysis_events": data = data[0] - elif target == "remoted": + elif target == "remote": data = data[1] elif target == "wazuhdb": data = data[2] @@ -286,7 +286,7 @@ def _write_csv(self, data, target, csv_file): decoded['monitor'] / interval, # 62 decoded['remote'] / interval, # 63 )) - elif target == "remoted": + elif target == "remote": metrics = data['metrics'] received_messages = metrics['messages']['received_breakdown'] sent_messages = metrics['messages']['sent_breakdown'] @@ -543,7 +543,7 @@ def _write_csv(self, data, target, csv_file): log.write("{0},{1},{2},{3},{4},{5}\n".format( timestamp, data['location'], data['events'], data['bytes'], data['target'], 
data['target_drops']))
 
-            elif target == "remoted":
+            elif target == "remote":
                 logger.info("Writing remoted.state info to {}.".format(csv_file))
                 log.write("{0},{1},{2},{3},{4},{5},{6},{7}\n".format(
                     timestamp, # 0
                     data['queue_size'], # 1
                     data['total_queue_size'], # 2
                     data['tcp_sessions'], # 3
                     data['evt_count'], # 4
                     data['ctrl_msg_count'], # 5
                     data['discarded_count'], # 6
                     data['recv_bytes'] # 7
                 ))

From e128396cfb827a0664fedc8b9e0e7a6cc1b47c83 Mon Sep 17 00:00:00 2001
From: Andres Carmelo Micalizzi Casali
Date: Tue, 20 Feb 2024 11:44:52 +0100
Subject: [PATCH 08/20] fix target argument documentation

---
 deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py
index 52331e22ff..7fab39348d 100644
--- a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py
+++ b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py
@@ -26,7 +26,7 @@ def get_script_arguments():
                         formatter_class=argparse.RawTextHelpFormatter)
     parser.add_argument('-t', '--target', dest='target_list', required=True, type=str, nargs='+', action='store',
                         help='Type the statistics target to collect separated by whitespace. '
-                             'Targets: agent, logcollector, remoted, analysis_events and analysis_state.')
+                             'Targets: agent, logcollector, remote, analysis_events, analysis_state and wazuhdb.')
     parser.add_argument('-s', '--sleep', dest='sleep_time', type=float, default=5, action='store',
                         help='Type the time in seconds between each entry.')
     parser.add_argument('-d', '--debug', dest='debug', action='store_true', default=False,

From b0ce46f10c4d8ad08d01396715d1d9446d36c0be Mon Sep 17 00:00:00 2001
From: Andres Carmelo Micalizzi Casali
Date: Wed, 21 Feb 2024 10:49:29 +0100
Subject: [PATCH 09/20] enhance documentation

---
 .../tools/performance/statistic.py | 43 +++++++++----------
 1 file changed, 21 insertions(+), 22 deletions(-)

diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
index fb3f316104..a56fd926f8 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
+++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
@@ -29,7 +29,8 @@ class StatisticMonitor:
-    """This class generates a Python object to monitor the statistics file generated by Wazuh.
+    """This class generates a Python object to monitor the statistics file generated by Wazuh. It can also retrieve
+    data through the Wazuh API for the analysisd, remoted and wazuh-db daemons.
 
     There are four files: wazuh-analysisd.state, wazuh-remoted.state, wazuh-agentd.state and wazuh-logcollector.state
     and each one of them has unique characteristics and data. This class will parse the file, extract the data to a
@@ -166,8 +167,6 @@ def _write_csv(self, data, target, csv_file):
             target (string): specifies which CSV must be generated
             csv_file (string): path to the CSV file.
""" - - if target == "analysis_state": csv_header = headers.analysisd_header elif target == "analysis_events": @@ -468,25 +467,25 @@ def _write_csv(self, data, target, csv_file): exec_glob_bd['tables']['agent']['update-agent-name'], # 127 exec_glob_bd['tables']['agent']['update-connection-status'], # 128 exec_glob_bd['tables']['agent']['update-status-code'], # 129 - exec_glob_bd['tables']['agent']['update-keepalive'], - exec_glob_bd['tables']['belongs']['get-group-agents'], - exec_glob_bd['tables']['belongs']['select-group-belong'], - exec_glob_bd['tables']['group']['delete-group'], - exec_glob_bd['tables']['group']['find-group'], - exec_glob_bd['tables']['group']['insert-agent-group'], - exec_glob_bd['tables']['group']['select-groups'], - exec_glob_bd['tables']['labels']['get-labels'], - execution_breakdown['mitre'], - execution_breakdown['task'], - execution_breakdown['task_breakdown']['tables']['tasks']['delete_old'], - execution_breakdown['task_breakdown']['tables']['tasks']['set_timeout'], - execution_breakdown['task_breakdown']['tables']['tasks']['upgrade'], - execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_cancel_tasks'], - execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_custom'], - execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_get_status'], - execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_result'], - execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_update_status'], - execution_breakdown['wazuhdb'] + exec_glob_bd['tables']['agent']['update-keepalive'], # 130 + exec_glob_bd['tables']['belongs']['get-group-agents'], # 131 + exec_glob_bd['tables']['belongs']['select-group-belong'], # 132 + exec_glob_bd['tables']['group']['delete-group'], # 133 + exec_glob_bd['tables']['group']['find-group'], # 134 + exec_glob_bd['tables']['group']['insert-agent-group'], # 135 + exec_glob_bd['tables']['group']['select-groups'], # 136 + exec_glob_bd['tables']['labels']['get-labels'], # 137 + execution_breakdown['mitre'], # 138 + execution_breakdown['task'], # 139 + execution_breakdown['task_breakdown']['tables']['tasks']['delete_old'], # 140 + execution_breakdown['task_breakdown']['tables']['tasks']['set_timeout'], # 141 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade'], # 142 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_cancel_tasks'], # 143 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_custom'], # 144 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_get_status'], # 145 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_result'], # 146 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_update_status'], # 147 + execution_breakdown['wazuhdb'] # 148 )) else: if target == "analysis_state": From 8128e8a9a2702d28ba4216603d8539104506be48 Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Wed, 21 Feb 2024 13:27:52 +0100 Subject: [PATCH 10/20] add port and ip for API --- .../wazuh_testing/scripts/wazuh_statistics.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py index 7fab39348d..bd42fd8e72 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py @@ -36,6 +36,10 @@ def get_script_arguments(): parser.add_argument('-a', '--use_api', dest='use_api', type=bool, 
From 135c652d304ea25402794a9cabd67fa7d46377e5 Mon Sep 17 00:00:00 2001
From: Andres Carmelo Micalizzi Casali
Date: Wed, 21 Feb 2024 13:28:09 +0100
Subject: [PATCH 11/20] add port and IP for API

---
 .../tools/performance/statistic.py | 60 +++++++++----------
 1 file changed, 27 insertions(+), 33 deletions(-)

diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
index a56fd926f8..682482caf7 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
+++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
@@ -1,6 +1,3 @@
-
-
-
 # Copyright (C) 2015-2021, Wazuh Inc.
 # Created by Wazuh, Inc. .
 # This program is free software; you can redistribute it and/or modify it under the terms of GPLv2
@@ -21,10 +18,6 @@
 
 urllib3.disable_warnings()
 
-API_URL="https://localhost:55000"
-DAEMONS_ENDPOINT="/manager/daemons/stats?daemons_list=wazuh-analysisd,wazuh-remoted,wazuh-db"
-TOKEN_ENDPOINT="/security/user/authenticate"
-
 logger = logging.getLogger('wazuh-statistics-monitor')
 
 
@@ -44,6 +37,8 @@ class StatisticMonitor:
         target (str, optional): target file to monitor.
         dst_dir (str, optional): path to store the file.
         use_api (bool, optional): Determine if the API should be used to collect the data. Default False.
+        ip (str, optional): IP address of the API. Default localhost.
+        port (str, optional): Port of the API. Default 55000.
 
     Attributes:
         event (thread.Event): thread Event used to control the scans.
@@ -55,16 +50,19 @@ class StatisticMonitor:
         parse_json (bool): Determine if the file is a JSON file. Default False.
     """
 
-    def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_api=False):
+    def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_api=False, ip = 'localhost', port = '55000'):
         self.event = None
         self.thread = None
         self.time_step = time_step
         self.target = target
         self.dst_dir = dst_dir
         self.use_api = use_api
+        self.ip = ip
+        self.port = port
+        self.daemon = None
         self.parse_json = False
 
-        if self.use_api == True and self.target != 'analysis_events' and self.target != 'remote' and self.target != 'wazuhdb':
+        if self.use_api == True and self.target != 'analysis' and self.target != 'remote' and self.target != 'wazuhdb':
             self.use_api = False
 
         if self.target == 'agent':
@@ -74,10 +72,13 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_api=Fa
             self.parse_json = True
         elif self.target == 'remote':
             self.statistics_file = tls.REMOTE_STATISTICS_FILE
-        elif self.target == 'analysis_state':
+            self.daemon = 'wazuh-remoted'
+        elif self.target == 'analysis':
             self.statistics_file = tls.ANALYSIS_STATISTICS_FILE
-        elif self.target == 'analysis_events' or self.target == 'wazuhdb':
+            self.daemon = 'wazuh-analysisd'
+        elif self.target == 'wazuhdb':
             self.use_api = True
+            self.daemon = 'wazuh-db'
         else:
             raise ValueError(f'The target {self.target} is not a valid one.')
@@ -140,22 +141,26 @@ def _parse_state_file(self):
             logger.error(f'Exception with {self.statistics_file} | {str(e)}')
 
-    def _parse_api_data(self):
+    def _parse_api_data(self, ip, port):
         """Read the data from the statistics file generated by Wazuh API."""
 
+        API_URL = f"https://{ip}:{port}"
+        DAEMONS_ENDPOINT= f"/manager/daemons/stats?daemons_list={self.daemon}"
+        TOKEN_ENDPOINT="/security/user/authenticate"
+
         logging.info("Getting statistics data from API for {}".format(self.target))
         response = requests.get(API_URL + TOKEN_ENDPOINT, verify=False, auth=requests.auth.HTTPBasicAuth("wazuh", "wazuh"))
         if response.status_code != 200:
-            logging.info("Retrying get API data, status code {}".format(response.status_code))
-            return self._parse_api_data()
+            logging.error("Retrying get API data, status code {}".format(response.status_code))
+            return self._parse_api_data(ip, port)
 
         daemons_response = requests.get(API_URL + DAEMONS_ENDPOINT, verify=False, headers={'Authorization': 'Bearer ' + response.json()['data']['token']})
         if daemons_response.status_code != 200:
-            logging.info("Retrying get API data, status code {}".format(response.status_code))
-            return self._parse_api_data()
+            logging.error("Retrying get API data, status code {}".format(response.status_code))
+            return self._parse_api_data(ip, port)
 
-        data = daemons_response.json()['data']['affected_items']
+        data = daemons_response.json()['data']['affected_items'][0]
 
         self._write_csv(data, self.target, self.csv_file)
@@ -167,10 +172,8 @@ def _write_csv(self, data, target, csv_file):
             target (string): specifies which CSV must be generated
             csv_file (string): path to the CSV file.
         """
-        if target == "analysis_state":
-            csv_header = headers.analysisd_header
-        elif target == "analysis_events":
-            csv_header = headers.analysisd_events_header
+        if target == "analysis":
+            csv_header = headers.analysisd_header if self.use_api == False else headers.analysisd_events_header
         elif target == "logcollector":
             csv_header = headers.logcollector_header
         elif target == "remote":
@@ -190,21 +193,12 @@ def _write_csv(self, data, target, csv_file):
         timestamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S')
 
         if self.use_api:
-
-            ## Get data from API response
-            if target == "analysis_events":
-                data = data[0]
-            elif target == "remote":
-                data = data[1]
-            elif target == "wazuhdb":
-                data = data[2]
-
             format = r"%Y-%m-%dT%H:%M:%S+%f:00"
             datetime_timestamp = datetime.strptime(data['timestamp'], format)
             datetime_uptime = datetime.strptime(data['uptime'], format)
             interval = (datetime_timestamp - datetime_uptime).total_seconds()
 
-            if target == "analysis_events":
+            if target == "analysis":
                 metrics = data['metrics']
                 decoded = metrics['events']['received_breakdown']['decoded_breakdown']
                 decoded_modules = decoded['modules_breakdown']
@@ -488,7 +482,7 @@ def _write_csv(self, data, target, csv_file):
                 execution_breakdown['wazuhdb']  # 148
             ))
         else:
-            if target == "analysis_state":
+            if target == "analysis":
                 logger.info("Writing analysisd.state info to {}.".format(csv_file))
                 log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},"+
                     "{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35},{36},{37},{38},{39},{40},{41},{42}\n")
@@ -565,7 +559,7 @@ def _monitor_stats(self):
         """Read the .state files and log the data into a CSV file."""
         while not self.event.is_set():
             if self.use_api:
-                self._parse_api_data()
+                self._parse_api_data(self.ip, self.port)
             else:
                 self._parse_state_file()
             sleep(self.time_step)
""" - if target == "analysis_state": - csv_header = headers.analysisd_header - elif target == "analysis_events": - csv_header = headers.analysisd_events_header + if target == "analysis": + csv_header = headers.analysisd_header if self.use_api == False else headers.analysisd_events_header elif target == "logcollector": csv_header = headers.logcollector_header elif target == "remote": @@ -190,21 +193,12 @@ def _write_csv(self, data, target, csv_file): timestamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S') if self.use_api: - - ## Get data from API response - if target == "analysis_events": - data = data[0] - elif target == "remote": - data = data[1] - elif target == "wazuhdb": - data = data[2] - format = r"%Y-%m-%dT%H:%M:%S+%f:00" datetime_timestamp = datetime.strptime(data['timestamp'], format) datetime_uptime = datetime.strptime(data['uptime'], format) interval = (datetime_timestamp - datetime_uptime).total_seconds() - if target == "analysis_events": + if target == "analysis": metrics = data['metrics'] decoded = metrics['events']['received_breakdown']['decoded_breakdown'] decoded_modules = decoded['modules_breakdown'] @@ -488,7 +482,7 @@ def _write_csv(self, data, target, csv_file): execution_breakdown['wazuhdb'] # 148 )) else: - if target == "analysis_state": + if target == "analysis": logger.info("Writing analysisd.state info to {}.".format(csv_file)) log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},"+ "{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35},{36},{37},{38},{39},{40},{41},{42}\n") @@ -565,7 +559,7 @@ def _monitor_stats(self): """Read the .state files and log the data into a CSV file.""" while not self.event.is_set(): if self.use_api: - self._parse_api_data() + self._parse_api_data(self.ip, self.port) else: self._parse_state_file() sleep(self.time_step) From 15d80c847a3fbf1823c75d597e1640aa38b6b4f2 Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Wed, 21 Feb 2024 13:29:24 +0100 Subject: [PATCH 12/20] fix documentation --- deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py index bd42fd8e72..8f2b44a443 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py @@ -26,7 +26,7 @@ def get_script_arguments(): formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('-t', '--target', dest='target_list', required=True, type=str, nargs='+', action='store', help='Type the statistics target to collect separated by whitespace. ' - 'Targets: agent, logcollector, remote, analysis_events, analysisd_state and wazuhdb') + 'Targets: agent, logcollector, remote, analysis and wazuhdb') parser.add_argument('-s', '--sleep', dest='sleep_time', type=float, default=5, action='store', help='Type the time in seconds between each entry.') parser.add_argument('-d', '--debug', dest='debug', action='store_true', default=False, @@ -35,7 +35,7 @@ def get_script_arguments(): help=f"Path to store the CSVs with the data. Default {gettempdir()}.") parser.add_argument('-a', '--use_api', dest='use_api', type=bool, action='store', default=False, help="Determine if the API should be used to collect the data. Default False." - "For remoted set to True to get data from API. 
From 0da9d00f7c59ae1f78cd6445608e41229f5293f6 Mon Sep 17 00:00:00 2001
From: Andres Carmelo Micalizzi Casali
Date: Wed, 21 Feb 2024 15:35:43 +0100
Subject: [PATCH 13/20] replace use_api parameter for use_state_file

---
 .../wazuh_testing/scripts/wazuh_statistics.py | 12 ++--
 .../tools/performance/statistic.py | 66 ++++++++++++-------
 2 files changed, 47 insertions(+), 31 deletions(-)

diff --git a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py
index 8f2b44a443..dbcd0a34fb 100644
--- a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py
+++ b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py
@@ -33,12 +33,12 @@ def get_script_arguments():
                         help='Enable debug level logging.')
     parser.add_argument('--store', dest='store_path', action='store', default=gettempdir(),
                         help=f"Path to store the CSVs with the data. Default {gettempdir()}.")
-    parser.add_argument('-a', '--use_api', dest='use_api', type=bool, action='store', default=False,
-                        help="Determine if the API should be used to collect the data. Default False."
-                             "For remoted and analysis set to True to get data from API. wazuhdb uses API by default.")
-    parser.add_argument('-i', '--ip' dest='ip', action='store', default='localhost',
+    parser.add_argument('-u', '--use_state_file', action='store_true', default=False,
+                        help="Determine if the state files should be used to collect the data for analysisd and remoted. "
+                             "Use with remoted and analysis to get data from state files. Default False.")
+    parser.add_argument('-i', '--ip', dest='ip', action='store', default='localhost',
                         help=f"IP for the API. Default localhost.")
-    parser.add_argument('-p', '--port' dest='port', action='store', default='55000',
+    parser.add_argument('-p', '--port', dest='port', action='store', default='55000',
                         help=f"port for the API. Default localhost.")
     return parser.parse_args()
@@ -59,7 +59,7 @@ def main():
 
     for target in options.target_list:
         monitor = StatisticMonitor(target=target, time_step=options.sleep_time, dst_dir=options.store_path,
-                                   use_api=options.use_api, ip=options.ip, port=options.port)
+                                   use_state_file=options.use_state_file, ip=options.ip, port=options.port)
         MONITOR_LIST.append(monitor)
         monitor.start()
diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
index 682482caf7..26a4ba3d89 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
+++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
@@ -36,7 +36,7 @@ class StatisticMonitor:
         time_step (int): Time between intervals.
         target (str, optional): target file to monitor.
         dst_dir (str, optional): path to store the file.
-        use_api (bool, optional): Determine if the API should be used to collect the data. Default False.
+        use_state_file (bool, optional): Determine if the state files should be used to collect the data. Default False.
         ip (str, optional): IP address of the API. Default localhost.
         port (str, optional): Port of the API. Default 55000.
 
@@ -50,25 +50,25 @@ class StatisticMonitor:
         parse_json (bool): Determine if the file is a JSON file. Default False.
     """
 
-    def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_api=False, ip = 'localhost', port = '55000'):
+    def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_state_file=False, ip = 'localhost', port = '55000'):
         self.event = None
         self.thread = None
         self.time_step = time_step
         self.target = target
         self.dst_dir = dst_dir
-        self.use_api = use_api
+        self.use_state_file = use_state_file
         self.ip = ip
         self.port = port
         self.daemon = None
         self.parse_json = False
 
-        if self.use_api == True and self.target != 'analysis' and self.target != 'remote' and self.target != 'wazuhdb':
-            self.use_api = False
 
         if self.target == 'agent':
             self.statistics_file = tls.AGENT_STATISTICS_FILE
+            self.use_state_file = True
         elif self.target == 'logcollector':
             self.statistics_file = tls.LOGCOLLECTOR_STATISTICS_FILE
+            self.use_state_file = True
             self.parse_json = True
         elif self.target == 'remote':
             self.statistics_file = tls.REMOTE_STATISTICS_FILE
@@ -77,12 +77,11 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_state_
             self.statistics_file = tls.ANALYSIS_STATISTICS_FILE
             self.daemon = 'wazuh-analysisd'
         elif self.target == 'wazuhdb':
-            self.use_api = True
             self.daemon = 'wazuh-db'
         else:
             raise ValueError(f'The target {self.target} is not a valid one.')
 
-        if self.use_api == False:
+        if self.use_state_file == True:
             state_file = splitext(basename(self.statistics_file))[0]
             self.csv_file = join(self.dst_dir, f'{state_file}_stats.csv')
         else:
@@ -141,24 +140,41 @@ def _parse_state_file(self):
             logger.error(f'Exception with {self.statistics_file} | {str(e)}')
 
-    def _parse_api_data(self, ip, port):
+    def _parse_api_data(self):
         """Read the data from the statistics file generated by Wazuh API."""
 
-        API_URL = f"https://{ip}:{port}"
+        API_URL = f"https://{self.ip}:{self.port}"
         DAEMONS_ENDPOINT= f"/manager/daemons/stats?daemons_list={self.daemon}"
         TOKEN_ENDPOINT="/security/user/authenticate"
 
         logging.info("Getting statistics data from API for {}".format(self.target))
-        response = requests.get(API_URL + TOKEN_ENDPOINT, verify=False, auth=requests.auth.HTTPBasicAuth("wazuh", "wazuh"))
-        if response.status_code != 200:
-            logging.error("Retrying get API data, status code {}".format(response.status_code))
-            return self._parse_api_data(ip, port)
-
-        daemons_response = requests.get(API_URL + DAEMONS_ENDPOINT, verify=False, headers={'Authorization': 'Bearer ' + response.json()['data']['token']})
-        if daemons_response.status_code != 200:
-            logging.error("Retrying get API data, status code {}".format(response.status_code))
-            return self._parse_api_data(ip, port)
+        max_retries = 3
+        token_response = None
+        daemons_response = None
+        # Try to get the response token three times
+        for _ in range(max_retries):
+            try:
+                token_response = requests.get(API_URL + TOKEN_ENDPOINT, verify=False,
+                                              auth=requests.auth.HTTPBasicAuth("wazuh", "wazuh"))
+                if token_response.status_code == 200:
+                    break
+            except requests.exceptions.RequestException as e:
+                logging.error(f"Error getting token from API: {str(e)}")
+        else:
+            logging.error("Failed to get API token after {} attempts".format(max_retries))
+
+        for _ in range(max_retries):
+            try:
+                daemons_response = requests.get(API_URL + DAEMONS_ENDPOINT, verify=False,
+                                                headers={'Authorization': 'Bearer ' + token_response.json()['data']['token']})
+                if daemons_response.status_code == 200:
+                    break
+            except requests.exceptions.RequestException as e:
+                logging.error(f"Error fetching {self.daemon} data from API: {str(e)}")
+        else:
+            logging.error("Failed to fetch daemons data after 3 attempts")
+
         data = daemons_response.json()['data']['affected_items'][0]
 
         self._write_csv(data, self.target, self.csv_file)
@@ -173,11 +189,11 @@ def _write_csv(self, data, target, csv_file):
             csv_file (string): path to the CSV file.
         """
         if target == "analysis":
-            csv_header = headers.analysisd_header if self.use_api == False else headers.analysisd_events_header
+            csv_header = headers.analysisd_header if self.use_state_file == True else headers.analysisd_events_header
         elif target == "logcollector":
             csv_header = headers.logcollector_header
         elif target == "remote":
-            csv_header = headers.remoted_api_header if self.use_api == True else headers.remoted_header
+            csv_header = headers.remoted_header if self.use_state_file == True else headers.remoted_api_header
         elif target == "wazuhdb":
             csv_header = headers.wazuhdb_header
         else:
@@ -192,7 +208,7 @@ def _write_csv(self, data, target, csv_file):
 
         timestamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S')
 
-        if self.use_api:
+        if self.use_state_file == False:
             format = r"%Y-%m-%dT%H:%M:%S+%f:00"
             datetime_timestamp = datetime.strptime(data['timestamp'], format)
             datetime_uptime = datetime.strptime(data['uptime'], format)
@@ -558,10 +574,10 @@ def _write_csv(self, data, target, csv_file):
     def _monitor_stats(self):
         """Read the .state files and log the data into a CSV file."""
         while not self.event.is_set():
-            if self.use_api:
-                self._parse_api_data(self.ip, self.port)
-            else:
+            if self.use_state_file:
                 self._parse_state_file()
+            else:
+                self._parse_api_data()
             sleep(self.time_step)
@@ -575,7 +591,7 @@ def run(self):
     def start(self):
         """Start the monitoring threads."""
         self.run()
-        if self.use_api:
+        if self.use_state_file:
             logger.info(f'Started monitoring statistics from {self.statistics_file}')
         else:
             logger.info(f'Started monitoring statistics from API for {self.target}')
From 17aac758f5fb2926c49e91c5320d653996f61015 Mon Sep 17 00:00:00 2001
From: Andres Carmelo Micalizzi Casali
Date: Wed, 21 Feb 2024 15:42:57 +0100
Subject: [PATCH 14/20] fix file naming

---
 .../wazuh_testing/tools/performance/statistic.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
index 26a4ba3d89..891f98709d 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
+++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
@@ -82,10 +82,9 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_state_
             raise ValueError(f'The target {self.target} is not a valid one.')
 
         if self.use_state_file == True:
-            state_file = splitext(basename(self.statistics_file))[0]
-            self.csv_file = join(self.dst_dir, f'{state_file}_stats.csv')
+            self.csv_file = join(self.dst_dir, f'wazuh-{self.target}d_stats.csv')
         else:
-            self.csv_file = join(self.dst_dir, f'wazuh-{self.target}_api_stats.csv')
+            self.csv_file = join(self.dst_dir, f'wazuh-{self.target}d_api_stats.csv')
From 68a79c1f08e996413c8b524936984dd4a231e041 Mon Sep 17 00:00:00 2001
From: Andres Carmelo Micalizzi Casali
Date: Wed, 21 Feb 2024 15:43:57 +0100
Subject: [PATCH 15/20] fix undo name change

---
 .../wazuh_testing/tools/performance/statistic.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
index 891f98709d..9184b40847 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
+++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
@@ -82,9 +82,10 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_state_
             raise ValueError(f'The target {self.target} is not a valid one.')
 
         if self.use_state_file == True:
-            self.csv_file = join(self.dst_dir, f'wazuh-{self.target}d_stats.csv')
+            state_file = splitext(basename(self.statistics_file))[0]
+            self.csv_file = join(self.dst_dir, f'wazuh-{state_file}_stats.csv')
         else:
-            self.csv_file = join(self.dst_dir, f'wazuh-{self.target}d_api_stats.csv')
+            self.csv_file = join(self.dst_dir, f'wazuh-{self.target}_api_stats.csv')

From a9192a52bea729bb7a79e26a3009572eefd8e666 Mon Sep 17 00:00:00 2001
From: Andres Carmelo Micalizzi Casali
Date: Wed, 21 Feb 2024 17:15:53 +0100
Subject: [PATCH 16/20] enhance wazuh-statistic docu

---
 .../wazuh_testing/scripts/wazuh_statistics.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py
index dbcd0a34fb..a5ecacbf7d 100644
--- a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py
+++ b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py
@@ -34,12 +34,13 @@ def get_script_arguments():
     parser.add_argument('--store', dest='store_path', action='store', default=gettempdir(),
                         help=f"Path to store the CSVs with the data. Default {gettempdir()}.")
     parser.add_argument('-u', '--use_state_file', action='store_true', default=False,
-                        help="Determine if the state files should be used to collect the data for analysisd and remoted. "
-                             "Use with remoted and analysis to get data from state files. Default False.")
+                        help="Use state files for analysis and remote operations. "
+                             "When used with 'remote' and 'analysis', data will be collected from state files; "
+                             "otherwise, the API will be used. Default False")
     parser.add_argument('-i', '--ip', dest='ip', action='store', default='localhost',
-                        help=f"IP for the API. Default localhost.")
+                        help=f"Specify the IP address for the API. Default is 'localhost'")
     parser.add_argument('-p', '--port', dest='port', action='store', default='55000',
-                        help=f"port for the API. Default localhost.")
+                        help=f"Specify the port for the API. Default is '55000'")
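With the options as documented at this point in the series, illustrative invocations (values and paths are examples only, and the script may be launched differently in a given environment) would look like:

    # Remoted and analysisd statistics through the API of a remote manager:
    python3 wazuh_statistics.py -t remote analysis -i 172.16.1.10 -p 55000 --store /tmp/stats

    # The same targets read from the local .state files every 10 seconds:
    python3 wazuh_statistics.py -t remote analysis -u -s 10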
From a1d0448b75f63ebcbe1d81489fae944108b17967 Mon Sep 17 00:00:00 2001
From: Andres Carmelo Micalizzi Casali
Date: Wed, 21 Feb 2024 17:19:49 +0100
Subject: [PATCH 17/20] add wait_for_complete to API endpoint

---
 .../wazuh_testing/tools/performance/statistic.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
index 9184b40847..2bd1d5c307 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
+++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py
@@ -66,10 +66,12 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_state_
         if self.target == 'agent':
             self.statistics_file = tls.AGENT_STATISTICS_FILE
             self.use_state_file = True
+            logger.warning("Agentd stat monitoring from API is not supported. Will get data from state file.")
         elif self.target == 'logcollector':
             self.statistics_file = tls.LOGCOLLECTOR_STATISTICS_FILE
             self.use_state_file = True
             self.parse_json = True
+            logger.warning("Logcollector stat monitoring from API is not supported. Will get data from state file.")
         elif self.target == 'remote':
             self.statistics_file = tls.REMOTE_STATISTICS_FILE
             self.daemon = 'wazuh-remoted'
@@ -81,7 +83,7 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_state_
         else:
             raise ValueError(f'The target {self.target} is not a valid one.')
 
-        if self.use_state_file == True:
+        if self.use_state_file:
             state_file = splitext(basename(self.statistics_file))[0]
             self.csv_file = join(self.dst_dir, f'wazuh-{state_file}_stats.csv')
         else:
@@ -144,7 +146,7 @@ def _parse_api_data(self):
         """Read the data from the statistics file generated by Wazuh API."""
 
         API_URL = f"https://{self.ip}:{self.port}"
-        DAEMONS_ENDPOINT= f"/manager/daemons/stats?daemons_list={self.daemon}"
+        DAEMONS_ENDPOINT= f"/manager/daemons/stats?daemons_list={self.daemon}?wait_for_complete=true"
         TOKEN_ENDPOINT="/security/user/authenticate"
 
         logging.info("Getting statistics data from API for {}".format(self.target))
@@ -556,7 +558,7 @@ def _write_csv(self, data, target, csv_file):
             logger.info("Writing remoted.state info to {}.".format(csv_file))
             log.write("{0},{1},{2},{3},{4},{5},{6},{7}\n".format(
                 timestamp,  # 0
-                data['queue_size'],  # 1
+                data['queue_size'],  # 1or analysisd and remoted."
                 data['total_queue_size'],  # 2
                 data['tcp_sessions'],  # 3
                 data['evt_count'],  # 4
@@ -568,7 +570,7 @@ def _write_csv(self, data, target, csv_file):
             logger.info("Writing agentd.state info to {}.".format(csv_file))
             log.write("{0},{1},{2},{3},{4},{5},{6}\n".format(
                 timestamp, data['status'], data['last_keepalive'],
-                data['last_ack'], data['msg_count'], data['msg_sent'], data['msg_buffer']))
+                data['last_ack'], data['msg_count']or analysisd and remoted.", data['msg_sent'], data['msg_buffer']))
""" if target == "analysis": - csv_header = headers.analysisd_header if self.use_state_file == True else headers.analysisd_events_header + csv_header = headers.analysisd_header if self.use_state_file else headers.analysisd_events_header elif target == "logcollector": csv_header = headers.logcollector_header elif target == "remote": - csv_header = headers.remoted_header if self.use_state_file == True else headers.remoted_api_header + csv_header = headers.remoted_header if self.use_state_file else headers.remoted_api_header elif target == "wazuhdb": csv_header = headers.wazuhdb_header else: From edac4529a02804af6f42e3525d627d0972cb5b38 Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Wed, 21 Feb 2024 17:31:05 +0100 Subject: [PATCH 19/20] fix typo --- .../wazuh_testing/tools/performance/statistic.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py index c61e42cfc7..666adf0c45 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py @@ -568,9 +568,8 @@ def _write_csv(self, data, target, csv_file): )) elif target == "agentd_state": logger.info("Writing agentd.state info to {}.".format(csv_file)) - log.write("{0},{1},{2},{3},{4},{5},{6}\n".format( - timestamp, data['status'], data['last_keepalive'], - data['last_ack'], data['msg_count']or analysisd and remoted.", data['msg_sent'], data['msg_buffer'])) + log.write("{0},{1},{2},{3},{4},{5},{6}\n".format(timestamp, data['status'], data['last_keepalive'], + data['last_ack'], data['msg_count'], data['msg_sent'], data['msg_buffer'])) def _monitor_stats(self): From 56c798a761bdfcb9775a9d5118924f1bc3c41f74 Mon Sep 17 00:00:00 2001 From: Andres Carmelo Micalizzi Casali Date: Wed, 21 Feb 2024 17:36:49 +0100 Subject: [PATCH 20/20] fix query typo --- .../wazuh_testing/wazuh_testing/tools/performance/statistic.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py index 666adf0c45..4813163f22 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py @@ -146,7 +146,7 @@ def _parse_api_data(self): """Read the data from the statistics file generated by Wazuh API.""" API_URL = f"https://{self.ip}:{self.port}" - DAEMONS_ENDPOINT= f"/manager/daemons/stats?daemons_list={self.daemon}?wait_for_complete=true" + DAEMONS_ENDPOINT= f"/manager/daemons/stats?daemons_list={self.daemon}&wait_for_complete=true" TOKEN_ENDPOINT="/security/user/authenticate" logging.info("Getting statistics data from API for {}".format(self.target)) @@ -176,7 +176,6 @@ def _parse_api_data(self): logging.error(f"Error fetching {self.daemon} datafrom API: {str(e)}") else: logging.error("Failed to fetch daemons data after 3 attempts") - data = daemons_response.json()['data']['affected_items'][0] self._write_csv(data, self.target, self.csv_file)