diff --git a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py index 52fc31308c..a5ecacbf7d 100644 --- a/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py +++ b/deps/wazuh_testing/wazuh_testing/scripts/wazuh_statistics.py @@ -26,13 +26,21 @@ def get_script_arguments(): formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('-t', '--target', dest='target_list', required=True, type=str, nargs='+', action='store', help='Type the statistics target to collect separated by whitespace. ' - 'Targets: agent, logcollector, remote and analysis.') + 'Targets: agent, logcollector, remote, analysis and wazuhdb') parser.add_argument('-s', '--sleep', dest='sleep_time', type=float, default=5, action='store', help='Type the time in seconds between each entry.') parser.add_argument('-d', '--debug', dest='debug', action='store_true', default=False, help='Enable debug level logging.') parser.add_argument('--store', dest='store_path', action='store', default=gettempdir(), help=f"Path to store the CSVs with the data. Default {gettempdir()}.") + parser.add_argument('-u', '--use_state_file', action='store_true', default=False, + help="Use state files for analysis and remote operations. " + "When used with 'remote' and 'analysis', data will be collected from state files; " + "otherwise, the API will be used. Default False") + parser.add_argument('-i', '--ip', dest='ip', action='store', default='localhost', + help=f"Specify the IP address for the API. Default is 'localhost'") + parser.add_argument('-p', '--port', dest='port', action='store', default='55000', + help=f"Specify the port for the API. 
Default is '55000'") return parser.parse_args() @@ -51,10 +59,11 @@ def main(): logger.info(f'Started new session: {CURRENT_SESSION}') for target in options.target_list: - monitor = StatisticMonitor(target=target, time_step=options.sleep_time, dst_dir=options.store_path) - monitor.start() + monitor = StatisticMonitor(target=target, time_step=options.sleep_time, dst_dir=options.store_path, + use_state_file=options.use_state_file, ip=options.ip, port=options.port) MONITOR_LIST.append(monitor) + monitor.start() if __name__ == '__main__': - main() + main() \ No newline at end of file diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py index ce1f75890c..4813163f22 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py @@ -5,21 +5,25 @@ import csv import json import logging +import requests +import urllib3 +import wazuh_testing.tools as tls from datetime import datetime from os.path import basename, isfile, join, splitext from re import sub from tempfile import gettempdir from threading import Thread, Event -from time import sleep +from time import sleep, time +from wazuh_testing.tools.performance import statistic_headers as headers -import wazuh_testing.tools as tls +urllib3.disable_warnings() logger = logging.getLogger('wazuh-statistics-monitor') -logger.setLevel(logging.INFO) class StatisticMonitor: - """This class generates a Python object to monitor the statistics file generated by Wazuh. + """This class generates a Python object to monitor the statistics file generated by Wazuh. It also recovers data + using Wazuh's API for the analysisd, remoted and wazuh-db daemons. There are four files: wazuh-analysisd.state, wazuh-remoted.state, wazuh-agentd.state and wazuh-logcollector.state and each one of them has unique characteristics and data. 
This class will parse the file, extract the data to a @@ -32,6 +36,9 @@ class StatisticMonitor: time_step (int): Time between intervals. target (str, optional): target file to monitor. dst_dir (str, optional): path to store the file. + use_state_file (bool, optional): Determine if the API should be used to collect the data. Default False. + ip (str, optional): IP address of the API. Default localhost. + port (str, optional): Port of the API. Default 55000. Attributes: event (thread.Event): thread Event used to control the scans. @@ -40,44 +47,63 @@ class StatisticMonitor: dst_dir (str): directory to store the CSVs. Defaults to temp directory. csv_file (str): path to the CSV file. target (str): target file to monitor. + parse_json (bool): Determine if the file is a JSON file. Default False. """ - def __init__(self, target='agent', time_step=5, dst_dir=gettempdir()): + def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_state_file=False, ip = 'localhost', port = '55000'): self.event = None self.thread = None self.time_step = time_step self.target = target self.dst_dir = dst_dir + self.use_state_file = use_state_file + self.ip = ip + self.port = port + self.daemon = None self.parse_json = False + if self.target == 'agent': self.statistics_file = tls.AGENT_STATISTICS_FILE + self.use_state_file = True + logger.warning("Agentd stat monitoring from API is not supported. Will get data from state file.") elif self.target == 'logcollector': self.statistics_file = tls.LOGCOLLECTOR_STATISTICS_FILE + self.use_state_file = True self.parse_json = True + logger.warning("Logcolletor stat monitoring from API is not supported. 
Will get data from state file.") elif self.target == 'remote': self.statistics_file = tls.REMOTE_STATISTICS_FILE + self.daemon = 'wazuh-remoted' elif self.target == 'analysis': self.statistics_file = tls.ANALYSIS_STATISTICS_FILE + self.daemon = 'wazuh-analysisd' + elif self.target == 'wazuhdb': + self.daemon = 'wazuh-db' else: raise ValueError(f'The target {self.target} is not a valid one.') - state_file = splitext(basename(self.statistics_file))[0] - self.csv_file = join(self.dst_dir, f'{state_file}_stats.csv') + if self.use_state_file: + state_file = splitext(basename(self.statistics_file))[0] + self.csv_file = join(self.dst_dir, f'wazuh-{state_file}_stats.csv') + else: + self.csv_file = join(self.dst_dir, f'wazuh-{self.target}_api_stats.csv') + - def _parse_classic_state_file(self, data): + def _parse_classic_state_file(self, data, target): """Parse the info from the .state files from Wazuh with shell compatible format. Args: data (dict): dictionary to store the data of the file. + target (string): specifies which CSV must be generated """ with open(self.statistics_file) as state_file: for line in state_file: if line.rstrip() and line.rstrip()[0] != '#': key, value = line.splitlines()[0].split('=') data[key] = value.split("'")[1] + self._write_csv(data, target, self.csv_file) - self._write_csv(data, self.csv_file) def _parse_logcollector_state_file(self, data): """Parse the info from the .state files from Wazuh with shell compatible format. 
@@ -87,7 +113,6 @@ def _parse_logcollector_state_file(self, data): """ with open(self.statistics_file) as state_file: state_info = json.load(state_file) - for file in state_info['global']['files']: if isfile(file['location']): csv_name = sub(r'\.', '_', basename(file['location'])) @@ -101,53 +126,476 @@ def _parse_logcollector_state_file(self, data): file_data['bytes'] = file['bytes'] file_data['target'] = target['name'] + file_data['target_drops'] = target['drops'] - self._write_csv(file_data, join(self.dst_dir, f'{csv_name}.csv')) + self._write_csv(file_data, 'logcollector', join(self.dst_dir, f'{csv_name}.csv')) + def _parse_state_file(self): """Read the data from the statistics file generated by Wazuh.""" try: logging.info("Getting statistics data from {}".format(self.statistics_file)) - data = {'Timestamp': datetime.now().strftime('%Y/%m/%d %H:%M:%S')} + data = {} if not self.parse_json: - self._parse_classic_state_file(data) + self._parse_classic_state_file(data, self.target) else: self._parse_logcollector_state_file(data) except Exception as e: logger.error(f'Exception with {self.statistics_file} | {str(e)}') - @staticmethod - def _write_csv(data, csv_file): + + def _parse_api_data(self): + """Read the data from the statistics file generated by Wazuh API.""" + + API_URL = f"https://{self.ip}:{self.port}" + DAEMONS_ENDPOINT= f"/manager/daemons/stats?daemons_list={self.daemon}&wait_for_complete=true" + TOKEN_ENDPOINT="/security/user/authenticate" + + logging.info("Getting statistics data from API for {}".format(self.target)) + + max_retries = 3 + token_response = None + daemons_response = None + # Try to get the response token three times + for _ in range(max_retries): + try: + token_response = requests.get(API_URL + TOKEN_ENDPOINT, verify=False, + auth=requests.auth.HTTPBasicAuth("wazuh", "wazuh")) + if token_response.status_code == 200: + break + except requests.exceptions.RequestException as e: + logging.error(f"Error getting token from API: {str(e)}") + else: 
+ logging.error("Retrying get API data, status code {}".format(token_response.status_code)) + + for _ in range(max_retries): + try: + daemons_response = requests.get(API_URL + DAEMONS_ENDPOINT, verify=False, + headers={'Authorization': 'Bearer ' + token_response.json()['data']['token']}) + if daemons_response.status_code == 200: + break + except requests.exceptions.RequestException as e: + logging.error(f"Error fetching {self.daemon} data from API: {str(e)}") + else: + logging.error("Failed to fetch daemons data after 3 attempts") + + data = daemons_response.json()['data']['affected_items'][0] + self._write_csv(data, self.target, self.csv_file) + + + def _write_csv(self, data, target, csv_file): """Write the data collected from the .state into a CSV file. Args: data (dict): dictionary containing the info from the .state file. + target (string): specifies which CSV must be generated csv_file (string): path to the CSV file. """ + if target == "analysis": + csv_header = headers.analysisd_header if self.use_state_file else headers.analysisd_events_header + elif target == "logcollector": + csv_header = headers.logcollector_header + elif target == "remote": + csv_header = headers.remoted_header if self.use_state_file else headers.remoted_api_header + elif target == "wazuhdb": + csv_header = headers.wazuhdb_header + else: + csv_header = headers.agentd_header + header = not isfile(csv_file) - with open(csv_file, 'a', newline='') as f: - csv_writer = csv.writer(f) + + with open(csv_file, 'a+') as log: + if header: - csv_writer.writerow(list(data)) + log.write(f'{",".join(csv_header)}\n') + + timestamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S') + + if self.use_state_file == False: + format = r"%Y-%m-%dT%H:%M:%S+%f:00" + datetime_timestamp = datetime.strptime(data['timestamp'], format) + datetime_uptime = datetime.strptime(data['uptime'], format) + interval = (datetime_timestamp - datetime_uptime).total_seconds() + + if target == "analysis": + metrics = 
data['metrics'] + decoded = metrics['events']['received_breakdown']['decoded_breakdown'] + decoded_modules = decoded['modules_breakdown'] + dropped = metrics['events']['received_breakdown']['dropped_breakdown'] + dropped_modules = dropped['modules_breakdown'] + written = metrics['events']['written_breakdown'] + + logger.info("Writing Analysisd events info to {}.".format(csv_file)) + log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},"+ + "{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35},{36},{37},{38},{39},{40},{41},"+ + "{42},{43},{44},{45},{46},{47},{48},{49},{50},{51},{52},{53},{54},{55},{56},{57},{58},{59},{60},{61},{62},"+ + "{63}\n").format( + timestamp, # 0 + data['timestamp'], # 1 + interval, # 2 + metrics['events']['processed'], # 3 + metrics['events']['received'], # 4 + decoded_modules['azure'], # 5 + decoded_modules['ciscat'], # 6 + decoded_modules['command'], # 7 + decoded_modules['docker'], # 8 + decoded_modules['logcollector_breakdown']['eventchannel'], # 9 + decoded_modules['logcollector_breakdown']['eventlog'], # 10 + decoded_modules['logcollector_breakdown']['macos'], # 11 + decoded_modules['logcollector_breakdown']['others'], # 12 + decoded_modules['osquery'], # 13 + decoded_modules['rootcheck'], # 14 + decoded_modules['sca'], # 15 + decoded_modules['syscheck'], # 16 + decoded_modules['syscollector'], # 17 + decoded_modules['vulnerability'], # 18 + decoded['agent'], # 19 + decoded['dbsync'], # 20 + decoded['monitor'], # 21 + decoded['remote'], # 22 + + dropped_modules['azure'], # 23 + dropped_modules['ciscat'], # 24 + dropped_modules['command'], # 25 + dropped_modules['docker'], # 26 + dropped_modules['logcollector_breakdown']['eventchannel'], # 27 + dropped_modules['logcollector_breakdown']['eventlog'], # 28 + dropped_modules['logcollector_breakdown']['macos'], # 29 + dropped_modules['logcollector_breakdown']['others'], # 30 + dropped_modules['osquery'], # 31 + 
dropped_modules['rootcheck'], # 32 + dropped_modules['sca'], # 33 + dropped_modules['syscheck'], # 34 + dropped_modules['syscollector'], # 35 + dropped_modules['vulnerability'], # 36 + dropped['agent'], # 37 + dropped['dbsync'], # 38 + dropped['monitor'], # 39 + dropped['remote'], # 40 + + written['alerts'], # 41 + written['archives'], # 42 + written['firewall'], # 43 + written['fts'], # 44 + written['stats'], # 45 + + decoded_modules['azure'] / interval, # 46 + decoded_modules['ciscat'] / interval, # 47 + decoded_modules['command'] / interval, # 48 + decoded_modules['docker'] / interval, # 49 + decoded_modules['logcollector_breakdown']['eventchannel'] / interval, # 50 + decoded_modules['logcollector_breakdown']['eventlog'] / interval, # 51 + decoded_modules['logcollector_breakdown']['macos'] / interval, # 52 + decoded_modules['logcollector_breakdown']['others'] / interval, # 53 + decoded_modules['osquery'] / interval, # 54 + decoded_modules['rootcheck'] / interval, # 55 + decoded_modules['sca'] / interval, # 56 + decoded_modules['syscheck'] / interval, # 57 + decoded_modules['syscollector'] / interval, # 58 + decoded_modules['vulnerability'] / interval, # 59 + decoded['agent'] / interval, # 60 + decoded['dbsync'] / interval, # 61 + decoded['monitor'] / interval, # 62 + decoded['remote'] / interval, # 63 + )) + elif target == "remote": + metrics = data['metrics'] + received_messages = metrics['messages']['received_breakdown'] + sent_messages = metrics['messages']['sent_breakdown'] + logger.info("Writing remoted data from API info to {}.".format(csv_file)) + log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},"+ + "{19},{20},{21},{22},{23},{24}\n").format( + timestamp, # 0 + data['timestamp'], # 1 + interval, # 2 + metrics['queues']['received']['size'], # 3 + metrics['queues']['received']['usage'], # 4 + metrics['tcp_sessions'], # 5 + metrics['keys_reload_count'], # 6 + received_messages['control'], # 7 + 
received_messages['control_breakdown']['keepalive'], # 8 + received_messages['control_breakdown']['request'], # 9 + received_messages['control_breakdown']['shutdown'], # 10 + received_messages['control_breakdown']['startup'], # 11 + received_messages['dequeued_after'], # 12 + received_messages['discarded'], # 13 + received_messages['event'], # 14 + received_messages['ping'], # 15 + received_messages['unknown'], # 16 + sent_messages['ack'], # 17 + sent_messages['ar'], # 18 + sent_messages['discarded'], # 19 + sent_messages['request'], # 20 + sent_messages['sca'], # 21 + sent_messages['shared'], # 22 + metrics['bytes']['received'], # 23 + metrics['bytes']['sent'] # 24 + )) + else: + received_breakdown = data['metrics']['queries']['received_breakdown'] + ag_bd = received_breakdown['agent_breakdown'] + glob_bd = received_breakdown['global_breakdown'] + execution_breakdown = data['metrics']['time']['execution_breakdown'] + exec_ag_bd = execution_breakdown['agent_breakdown'] + exec_glob_bd = execution_breakdown['global_breakdown'] + logger.info("Writing wazuh-db data from API info to {}.".format(csv_file)) + log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},"+ + "{19},{20},{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35},"+ + "{36},{37},{38},{39},{40},{41},{42},{43},{44},{45},{46},{47},{48},{49},{50},{51},{52},"+ + "{53},{54},{55},{56},{57},{58},{59},{60},{61},{62},{63},{64},{65},{66},{67},{68},{69},"+ + "{70},{71},{72},{73},{74},{75},{76},{77},{78},{79},{80},{81},{82},{83},{84},{85},{86},"+ + "{87},{88},{89},{90},{91},{92},{93},{94},{95},{96},{97},{98},{99},{100},{101},{102},{103},"+ + "{104},{105},{106},{107},{108},{109},{110},{111},{112},{113},{114},{115},{116},{117},{118},"+ + "{119},{120},{121},{122},{123},{124},{125},{126},{127},{128},{129},{130},{131},{132},{133},"+ + "{134},{135},{136},{137},{138},{139},{140},{141},{142},{143},{144},{145},{146},{147},{148}\n").format( + timestamp, # 0 + data['timestamp'], # 1 + interval, # 2 + data['metrics']['queries']['received'], # 3 + received_breakdown['agent'], # 4 + ag_bd['db']['begin'], # 5 + ag_bd['db']['close'], # 6 + ag_bd['db']['commit'], # 7 + ag_bd['db']['remove'], # 8 + ag_bd['db']['sql'], # 9 + ag_bd['db']['vacuum'], # 10 + ag_bd['db']['get_fragmentation'], # 11 + ag_bd['tables']['ciscat']['ciscat'], # 12 + ag_bd['tables']['rootcheck']['rootcheck'], # 13 + ag_bd['tables']['sca']['sca'], # 14 + ag_bd['tables']['sync']['dbsync'], # 15 + ag_bd['tables']['syscheck']['syscheck'], # 16 + ag_bd['tables']['syscheck']['fim_file'], # 17 + ag_bd['tables']['syscheck']['fim_registry'], # 18 + ag_bd['tables']['syscheck']['fim_registry_key'], # 19 + ag_bd['tables']['syscheck']['fim_registry_value'], # 20 + ag_bd['tables']['syscollector']['syscollector_hotfixes'], # 21 + ag_bd['tables']['syscollector']['syscollector_hwinfo'], # 22 + ag_bd['tables']['syscollector']['syscollector_network_address'], # 23 + ag_bd['tables']['syscollector']['syscollector_network_iface'], # 24 + ag_bd['tables']['syscollector']['syscollector_network_protocol'], # 25 + ag_bd['tables']['syscollector']['syscollector_osinfo'], # 26 + ag_bd['tables']['syscollector']['syscollector_packages'], # 27 + ag_bd['tables']['syscollector']['syscollector_ports'], # 28 + ag_bd['tables']['syscollector']['syscollector_processes'], # 29 + ag_bd['tables']['vulnerability']['vuln_cves'], # 30 + received_breakdown['global'], # 31 + glob_bd['db']['backup'], # 32 + glob_bd['db']['sql'], # 33 + glob_bd['db']['vacuum'], # 34 + glob_bd['db']['get_fragmentation'], # 35 + glob_bd['tables']['agent']['delete-agent'], # 36 + glob_bd['tables']['agent']['disconnect-agents'], # 37 + glob_bd['tables']['agent']['find-agent'], # 38 + glob_bd['tables']['agent']['get-agent-info'], # 39 + glob_bd['tables']['agent']['get-agents-by-connection-status'], # 40 + 
glob_bd['tables']['agent']['get-all-agents'], # 41 + glob_bd['tables']['agent']['get-distinct-groups'], # 42 + glob_bd['tables']['agent']['get-groups-integrity'], # 43 + glob_bd['tables']['agent']['insert-agent'], # 44 + glob_bd['tables']['agent']['reset-agents-connection'], # 45 + glob_bd['tables']['agent']['select-agent-group'], # 46 + glob_bd['tables']['agent']['select-agent-name'], # 47 + glob_bd['tables']['agent']['set-agent-groups'], # 48 + glob_bd['tables']['agent']['sync-agent-groups-get'], # 49 + glob_bd['tables']['agent']['sync-agent-info-get'], # 50 + glob_bd['tables']['agent']['sync-agent-info-set'], # 51 + glob_bd['tables']['agent']['update-agent-data'], # 52 + glob_bd['tables']['agent']['update-agent-name'], # 53 + glob_bd['tables']['agent']['update-connection-status'], # 54 + glob_bd['tables']['agent']['update-status-code'], # 55 + glob_bd['tables']['agent']['update-keepalive'], # 56 + glob_bd['tables']['belongs']['get-group-agents'], # 57 + glob_bd['tables']['belongs']['select-group-belong'], # 58 + glob_bd['tables']['group']['delete-group'], # 59 + glob_bd['tables']['group']['find-group'], # 60 + glob_bd['tables']['group']['insert-agent-group'], # 61 + glob_bd['tables']['group']['select-groups'], # 62 + glob_bd['tables']['labels']['get-labels'], # 63 + received_breakdown['mitre'], # 64 + received_breakdown['task'], # 65 + received_breakdown['task_breakdown']['tables']['tasks']['delete_old'], # 66 + received_breakdown['task_breakdown']['tables']['tasks']['set_timeout'], # 67 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade'], # 68 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade_cancel_tasks'], # 69 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade_custom'], # 70 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade_get_status'], # 71 + received_breakdown['task_breakdown']['tables']['tasks']['upgrade_result'], # 72 + 
received_breakdown['task_breakdown']['tables']['tasks']['upgrade_update_status'], # 73 + received_breakdown['wazuhdb'], # 74 + + data['metrics']['time']['execution'], # 75 + execution_breakdown['agent'], # 76 + exec_ag_bd['db']['open'], # 77 + exec_ag_bd['db']['begin'], # 78 + exec_ag_bd['db']['close'], # 79 + exec_ag_bd['db']['commit'], # 80 + exec_ag_bd['db']['remove'], # 81 + exec_ag_bd['db']['sql'], # 82 + exec_ag_bd['db']['vacuum'], # 83 + exec_ag_bd['db']['get_fragmentation'], # 84 + exec_ag_bd['tables']['ciscat']['ciscat'], # 85 + exec_ag_bd['tables']['rootcheck']['rootcheck'], # 86 + exec_ag_bd['tables']['sca']['sca'], # 87 + exec_ag_bd['tables']['sync']['dbsync'], # 88 + exec_ag_bd['tables']['syscheck']['syscheck'], # 89 + exec_ag_bd['tables']['syscheck']['fim_file'], # 90 + exec_ag_bd['tables']['syscheck']['fim_registry'], # 91 + exec_ag_bd['tables']['syscheck']['fim_registry_key'], # 92 + exec_ag_bd['tables']['syscheck']['fim_registry_value'], # 93 + exec_ag_bd['tables']['syscollector']['syscollector_hotfixes'], # 94 + exec_ag_bd['tables']['syscollector']['syscollector_hwinfo'], # 95 + exec_ag_bd['tables']['syscollector']['syscollector_network_address'], # 96 + exec_ag_bd['tables']['syscollector']['syscollector_network_iface'], # 97 + exec_ag_bd['tables']['syscollector']['syscollector_network_protocol'], # 98 + exec_ag_bd['tables']['syscollector']['syscollector_osinfo'], # 99 + exec_ag_bd['tables']['syscollector']['syscollector_packages'], # 100 + exec_ag_bd['tables']['syscollector']['syscollector_ports'], # 101 + exec_ag_bd['tables']['syscollector']['syscollector_processes'], # 102 + exec_ag_bd['tables']['vulnerability']['vuln_cves'], # 103 + data['metrics']['time']['execution'], # 104 + exec_glob_bd['db']['open'], # 105 + exec_glob_bd['db']['backup'], # 106 + exec_glob_bd['db']['sql'], # 107 + exec_glob_bd['db']['vacuum'], # 108 + exec_glob_bd['db']['get_fragmentation'], # 109 + exec_glob_bd['tables']['agent']['delete-agent'], # 110 + 
exec_glob_bd['tables']['agent']['disconnect-agents'], # 111 + exec_glob_bd['tables']['agent']['find-agent'], # 112 + exec_glob_bd['tables']['agent']['get-agent-info'], # 113 + exec_glob_bd['tables']['agent']['get-agents-by-connection-status'], # 114 + exec_glob_bd['tables']['agent']['get-all-agents'], # 115 + exec_glob_bd['tables']['agent']['get-distinct-groups'], # 116 + exec_glob_bd['tables']['agent']['get-groups-integrity'], # 117 + exec_glob_bd['tables']['agent']['insert-agent'], # 118 + exec_glob_bd['tables']['agent']['reset-agents-connection'], # 119 + exec_glob_bd['tables']['agent']['select-agent-group'], # 120 + exec_glob_bd['tables']['agent']['select-agent-name'], # 121 + exec_glob_bd['tables']['agent']['set-agent-groups'], # 122 + exec_glob_bd['tables']['agent']['sync-agent-groups-get'], # 123 + exec_glob_bd['tables']['agent']['sync-agent-info-get'], # 124 + exec_glob_bd['tables']['agent']['sync-agent-info-set'], # 125 + exec_glob_bd['tables']['agent']['update-agent-data'], # 126 + exec_glob_bd['tables']['agent']['update-agent-name'], # 127 + exec_glob_bd['tables']['agent']['update-connection-status'], # 128 + exec_glob_bd['tables']['agent']['update-status-code'], # 129 + exec_glob_bd['tables']['agent']['update-keepalive'], # 130 + exec_glob_bd['tables']['belongs']['get-group-agents'], # 131 + exec_glob_bd['tables']['belongs']['select-group-belong'], # 132 + exec_glob_bd['tables']['group']['delete-group'], # 133 + exec_glob_bd['tables']['group']['find-group'], # 134 + exec_glob_bd['tables']['group']['insert-agent-group'], # 135 + exec_glob_bd['tables']['group']['select-groups'], # 136 + exec_glob_bd['tables']['labels']['get-labels'], # 137 + execution_breakdown['mitre'], # 138 + execution_breakdown['task'], # 139 + execution_breakdown['task_breakdown']['tables']['tasks']['delete_old'], # 140 + execution_breakdown['task_breakdown']['tables']['tasks']['set_timeout'], # 141 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade'], # 142 + 
execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_cancel_tasks'], # 143 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_custom'], # 144 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_get_status'], # 145 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_result'], # 146 + execution_breakdown['task_breakdown']['tables']['tasks']['upgrade_update_status'], # 147 + execution_breakdown['wazuhdb'] # 148 + )) + else: + if target == "analysis": + logger.info("Writing analysisd.state info to {}.".format(csv_file)) + log.write(("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11},{12},{13},{14},{15},{16},{17},{18},{19},{20},"+ + "{21},{22},{23},{24},{25},{26},{27},{28},{29},{30},{31},{32},{33},{34},{35},{36},{37},{38},{39},{40},{41},{42}\n") + .format( + timestamp, # 0 + data['total_events_decoded'], # 1 + data['syscheck_events_decoded'], # 2 + data['syscollector_events_decoded'], # 3 + data['rootcheck_events_decoded'], # 4 + data['sca_events_decoded'], # 5 + data['winevt_events_decoded'], # 6 + data['dbsync_messages_dispatched'], # 7 + data['other_events_decoded'], # 8 + data['events_processed'], # 9 + data['events_received'], # 10 + data['events_dropped'], # 11 + data['alerts_written'], # 12 + data['firewall_written'], # 13 + data['fts_written'], # 14 + data['syscheck_queue_usage'], # 15 + data['syscheck_queue_size'], # 16 + data['syscollector_queue_usage'], # 17 + data['syscollector_queue_size'], # 18 + data['rootcheck_queue_usage'], # 19 + data['rootcheck_queue_size'], # 20 + data['sca_queue_usage'], # 21 + data['sca_queue_size'], # 22 + data['hostinfo_queue_usage'], # 23 + data['hostinfo_queue_size'], # 24 + data['winevt_queue_usage'], # 25 + data['winevt_queue_size'], # 26 + data['dbsync_queue_usage'], # 27 + data['dbsync_queue_size'], # 28 + data['upgrade_queue_usage'], # 29 + data['upgrade_queue_size'], # 30 + data['event_queue_usage'], # 31 + data['event_queue_size'], # 32 + 
data['rule_matching_queue_usage'], # 33 + data['rule_matching_queue_size'], # 34 + data['alerts_queue_usage'], # 35 + data['alerts_queue_size'], # 36 + data['firewall_queue_usage'], # 37 + data['firewall_queue_size'], # 38 + data['statistical_queue_usage'], # 39 + data['statistical_queue_size'], # 40 + data['archives_queue_usage'], # 41 + data['archives_queue_size'] # 42 + )) + elif target == "logcollector": + logger.info("Writing logcollector info to {}.".format(csv_file)) + log.write("{0},{1},{2},{3},{4},{5}\n".format( + timestamp, data['location'], data['events'], data['bytes'], data['target'], data['target_drops'])) + + elif target == "remote": + logger.info("Writing remoted.state info to {}.".format(csv_file)) + log.write("{0},{1},{2},{3},{4},{5},{6},{7}\n".format( + timestamp, # 0 + data['queue_size'], # 1 + data['total_queue_size'], # 2 + data['tcp_sessions'], # 3 + data['evt_count'], # 4 + data['ctrl_msg_count'], # 5 + data['discarded_count'], # 6 + data['recv_bytes'] # 7 + )) + elif target == "agentd_state": + logger.info("Writing agentd.state info to {}.".format(csv_file)) + log.write("{0},{1},{2},{3},{4},{5},{6}\n".format(timestamp, data['status'], data['last_keepalive'], + data['last_ack'], data['msg_count'], data['msg_sent'], data['msg_buffer'])) - csv_writer.writerow(list(data.values())) - logger.debug(f'Added new entry in {csv_file}') def _monitor_stats(self): """Read the .state files and log the data into a CSV file.""" while not self.event.is_set(): - self._parse_state_file() + if self.use_state_file: + self._parse_state_file() + else: + self._parse_api_data() sleep(self.time_step) + def run(self): """Run the event and thread monitoring functions.""" self.event = Event() self.thread = Thread(target=self._monitor_stats) self.thread.start() + def start(self): """Start the monitoring threads.""" self.run() - logger.info(f'Started monitoring statistics from {self.statistics_file}') + if self.use_state_file: + 
logger.info(f'Started monitoring statistics from {self.statistics_file}') + else: + logger.info(f'Started monitoring statistics from API for {self.target}') + def shutdown(self): """Stop all the monitoring threads.""" diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py new file mode 100644 index 0000000000..a7b15c0423 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py @@ -0,0 +1,337 @@ +analysisd_events_header = ["Timestamp", + "API Timestamp", + "Interval (Timestamp-Uptime)", + "Events processed", + "Events received", + + "Decoded from azure", + "Decoded from ciscat", + "Decoded from command", + "Decoded from docker", + "Decoded from logcollector eventchannel", + "Decoded from logcollector eventlog", + "Decoded from logcollector macos", + "Decoded from logcollector others", + "Decoded from osquery", + "Decoded from rootcheck", + "Decoded from sca", + "Decoded from syscheck", + "Decoded from syscollector", + "Decoded from vulnerability", + "Decoded from agentd", + "Decoded from dbsync", + "Decoded from monitor", + "Decoded from remote", + + "Dropped from azure", + "Dropped from ciscat", + "Dropped from command", + "Dropped from docker", + "Dropped from logcollector eventchannel", + "Dropped from logcollector eventlog", + "Dropped from logcollector macos", + "Dropped from logcollector others", + "Dropped from osquery", + "Dropped from rootcheck", + "Dropped from sca", + "Dropped from syscheck", + "Dropped from syscollector", + "Dropped from vulnerability", + "Dropped from agentd", + "Dropped from dbsync", + "Dropped from monitor", + "Dropped from remote", + + "Written alerts", + "Written archives", + "Written firewall", + "Written fts", + "Written stats", + + "EDPS from azure", + "EDPS from ciscat", + "EDPS from command", + "EDPS from docker", + "EDPS from logcollector eventchannel", + "EDPS from logcollector eventlog", 
+ "EDPS from logcollector macos", + "EDPS from logcollector others", + "EDPS from osquery", + "EDPS from rootcheck", + "EDPS from sca", + "EDPS from syscheck", + "EDPS from syscollector", + "EDPS from vulnerability", + "EDPS from agentd", + "EDPS from dbsync", + "EDPS from monitor", + "EDPS from remote" + ] +analysisd_header = ["Timestamp", + "Total Events", + "Syscheck Events Decoded", + "Syscollector Events Decoded", + "Rootcheck Events Decoded", + "SCA Events Decoded", + "WinEvt Events Decoded", + "DBSync Messages dispatched", + "Other Events Decoded", + "Events processed (Rule matching)", + "Events received", + "Events dropped", + "Alerts written", + "Firewall alerts written", + "FTS alerts written", + "Syscheck queue usage", + "Syscheck queue size", + "Syscollector queue usage", + "Syscollector queue size", + "Rootcheck queue usage", + "Rootcheck queue size", + "SCA queue usage", + "SCA queue size", + "Hostinfo queue usage", + "Hostinfo queue size", + "Winevt queue usage", + "Winevt queue size", + "DBSync queue usage", + "DBSync queue size", + "Upgrade queue usage", + "Upgrade queue size", + "Event queue usage", + "Event queue size", + "Rule matching queue usage", + "Rule matching queue size", + "Alerts log queue usage", + "Alerts log queue size", + "Firewall log queue usage", + "Firewall log queue size", + "Statistical log queue usage", + "Statistical log queue size", + "Archives log queue usage", + "Archives log queue size" + ] +logcollector_header = ["Timestamp", + "Location", + "Events", + "Bytes", + "Target", + "Target Drops" + ] +remoted_header = ["Timestamp", + "Queue size", + "Total Queue size", + "TCP sessions", + "Events count", + "Control messages", + "Discarded messages", + "Bytes received"] + +remoted_api_header = ["Timestamp", + "API Timestamp", + "Interval (Timestamp-Uptime)", + "Queue size", + "Queue usage", + "TCP sessions", + "Keys reload count", + + "Control messages", + "Control keepalives", + "Control requests", + "Control shutdown", + 
"Control startup",
+
+                      "Dequeued messages",
+                      "Discarded messages",
+                      "Events count",
+                      "Ping messages",
+                      "Unknown messages",
+
+                      "Sent ack",
+                      "Sent ar",
+                      "Sent discarded",
+                      "Sent request",
+                      "Sent sca",
+                      "Sent shared",
+
+                      "Metrics-Bytes received",
+                      "Metrics-Bytes sent"]
+
+agentd_header = ["Timestamp",
+                 "Status",
+                 "Last Keepalive",
+                 "Last ACK",
+                 "Number of generated events",
+                 "Number of messages",
+                 "Number of events buffered"]
+
+
+wazuhdb_header = ["Timestamp",
+                  "API Timestamp",
+                  "Interval (Timestamp-Uptime)",
+                  ## QUERIES COUNTS
+                  "Received queries",
+                  "Agent queries",
+                  ## Agent Queries Breakdown
+                  "db-begin",
+                  "db-close",
+                  "db-commit",
+                  "db-remove",
+                  "db-sql",
+                  "db-vacuum",
+                  "db-get_fragmentation",
+                  ## Agent Tables Breakdown
+                  "Table CisCat",
+                  "Table Rootcheck",
+                  "Table SCA",
+                  "Table dbsync",
+                  "Table Syscheck",
+                  "Table Syscheck file",
+                  "Table Syscheck registry",
+                  "Table Syscheck registry_key",
+                  "Table Syscheck registry_value",
+                  "Table Syscollector hotfixes",
+                  "Table Syscollector hwinfo",
+                  "Table Syscollector network_address",
+                  "Table Syscollector network_iface",
+                  "Table Syscollector network_protocol",
+                  "Table Syscollector os_info",
+                  "Table Syscollector packages",
+                  "Table Syscollector ports",
+                  "Table Syscollector processes",
+                  "Table Vulnerability CVEs",
+
+                  "Global queries",
+                  ## Global Queries Breakdown
+                  "db-backup",
+                  "db-sql",
+                  "db-vacuum",
+                  "db-get_fragmentation",
+
+                  "agent-delete-agent",
+                  "agent-disconnect-agents",
+                  "agent-find-agent",
+                  "agent-get-agent-info",
+                  "agent-get-agents-by-connection-status",
+                  "agent-get-all-agents",
+                  "agent-get-distinct-groups",
+                  "agent-get-groups-integrity",
+                  "agent-insert-agent",
+                  "agent-reset-agents-connection",
+                  "agent-select-agent-group",
+                  "agent-select-agent-name",
+                  "agent-set-agent-groups",
+                  "agent-sync-agent-groups-get",
+                  "agent-sync-agent-info-get",
+                  "agent-sync-agent-info-set",
+                  "agent-update-agent-data",
+                  "agent-update-agent-name",
+                  
"agent-update-connection-status",
+                  "agent-update-status-code",
+                  "agent-update-keepalive",
+
+                  "belongs-get-group-agents",
+                  "belongs-select-group-belong",
+
+                  "group-delete-group",
+                  "group-find-group",
+                  "group-insert-agent-group",
+                  "group-select-groups",
+                  "labels-get-labels",
+
+                  "MITRE",
+                  "Tasks",
+                  ## tasks breakdown
+                  "tasks-delete old task",
+                  "tasks-set timeout",
+                  "tasks-upgrade",
+                  "tasks-upgrade cancel",
+                  "tasks-upgrade custom",
+                  "tasks-upgrade get status",
+                  "tasks-upgrade results",
+                  "tasks-upgrade update status",
+                  "Wazuhdb",
+
+                  ## QUERIES TIME METRICS - IN MILLISECONDS
+                  "Total Execution Time",
+                  "Agent ExecTime",
+                  ## Agent Time Breakdown
+                  "db-open",
+                  "db-begin",
+                  "db-close",
+                  "db-commit",
+                  "db-remove",
+                  "db-sql",
+                  "db-vacuum",
+                  "db-get_fragmentation",
+                  ## Agent Tables Breakdown
+                  "Table CisCat",
+                  "Table Rootcheck",
+                  "Table SCA",
+                  "Table dbsync",
+                  "Table Syscheck",
+                  "Table Syscheck file",
+                  "Table Syscheck registry",
+                  "Table Syscheck registry_key",
+                  "Table Syscheck registry_value",
+                  "Table Syscollector hotfixes",
+                  "Table Syscollector hwinfo",
+                  "Table Syscollector network_address",
+                  "Table Syscollector network_iface",
+                  "Table Syscollector network_protocol",
+                  "Table Syscollector os_info",
+                  "Table Syscollector packages",
+                  "Table Syscollector ports",
+                  "Table Syscollector processes",
+                  "Table Vulnerability CVEs",
+
+                  ## Global Queries Time Breakdown
+                  "Global Queries ExecTime",
+                  "db-open",
+                  "db-backup",
+                  "db-sql",
+                  "db-vacuum",
+                  "db-get_fragmentation",
+                  ## Global Tables breakdown
+                  "agent-delete-agent",
+                  "agent-disconnect-agents",
+                  "agent-find-agent",
+                  "agent-get-agent-info",
+                  "agent-get-agents-by-connection-status",
+                  "agent-get-all-agents",
+                  "agent-get-distinct-groups",
+                  "agent-get-groups-integrity",
+                  "agent-insert-agent",
+                  "agent-reset-agents-connection",
+                  "agent-select-agent-group",
+                  "agent-select-agent-name",
+                  "agent-set-agent-groups",
+                  "agent-sync-agent-groups-get",
+                  "agent-sync-agent-info-get",
+                  
"agent-sync-agent-info-set", + "agent-update-agent-data", + "agent-update-agent-name", + "agent-update-connection-status", + "agent-update-status-code", + "agent-update-keepalive", + "belongs-get-group-agents", + "belongs-select-group-belong", + "group-delete-group", + "group-find-group", + "group-insert-agent-group", + "group-select-groups", + "labels-get-labels", + + "MITRE", + "Tasks", + ## tasks breakdown + "tasks-delete old task", + "tasks-set timeout", + "tasks-upgrade", + "tasks-upgrade cancel", + "tasks-upgrade custom", + "tasks-upgrade get status", + "tasks-upgrade results", + "tasks-upgrade update status", + "Wazuhdb" + ]