diff --git a/CHANGELOG.md b/CHANGELOG.md index 58e9ae6fed..45b07e0886 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,8 +8,10 @@ All notable changes to this project will be documented in this file. ### Added -- Add Workflow module to Wazuh-qa repository ([#4990](https://github.com/wazuh/wazuh-qa/pull/4990)) \- (Tests) - Add integration tests for Update field to CPE_Helper. ([#4574](https://github.com/wazuh/wazuh-qa/pull/4574)) \- (Core) +- Updated integration tests README ([#4742](https://github.com/wazuh/wazuh-qa/pull/4742)) \- (Framework) +- Removed configobj library from requirements.txt ([#4803](https://github.com/wazuh/wazuh-qa/pull/4803)) \- (Framework) +- Add Workflow module to Wazuh-qa repository ([#4990](https://github.com/wazuh/wazuh-qa/pull/4990)) \- (Tests) ### Changed @@ -18,24 +20,16 @@ All notable changes to this project will be documented in this file. ### Fixed - Add an IT to check that the agent erases its wazuh-agent.state file ([#4716](https://github.com/wazuh/wazuh-qa/pull/4716)) \- (Core) - -## [4.8.2] - TBD - -## [4.8.1] - TBD - -### Changed - -- Removed configobj library from requirements.txt ([#4803](https://github.com/wazuh/wazuh-qa/pull/4803)) \- (Framework) -- Updated integration tests README ([#4742](https://github.com/wazuh/wazuh-qa/pull/4742)) \- (Framework) - -### Fixed - - Fix manager_agent system tests environment ([#4808](https://github.com/wazuh/wazuh-qa/pull/4808)) \- (Framework) +- Fixed agent_simulator response for active-response configuration commands. ([#4895](https://github.com/wazuh/wazuh-qa/pull/4895)) \- (Framework) ## [4.8.0] - TBD ### Added +- Add functionality to obtain statistics and metrics from the indexer. 
([#5090](https://github.com/wazuh/wazuh-qa/pull/5090)) \- (Framework) +- Add support for the installation/uninstallation of npm packages ([#5092](https://github.com/wazuh/wazuh-qa/pull/5092)) \- (Tests) +- Add alert.json file to Vulnerability Detector E2E test report ([#5147](https://github.com/wazuh/wazuh-qa/pull/5147)) \- (Framework) - Add documentation about markers for system tests ([#5080](https://github.com/wazuh/wazuh-qa/pull/5080)) \- (Documentation) - Add AWS Custom Buckets Integration tests ([#4675](https://github.com/wazuh/wazuh-qa/pull/4675)) \- (Framework + Tests) - Add Vulnerability Detector end to end tests ([#4878](https://github.com/wazuh/wazuh-qa/pull/4878)) \- (Framework + Tests) @@ -52,6 +46,8 @@ All notable changes to this project will be documented in this file. ### Changed +- Replace timestamp filter with vulnerabilities detected_at field.([#5266](https://github.com/wazuh/wazuh-qa/pull/5266)) \- (Framework + Tests) +- Changes macOS packages with new ones that generate vulnerabilities ([#5174](https://github.com/wazuh/wazuh-qa/pull/5174)) \- (Tests) - Refactor initial scan Vulnerability E2E tests ([#5081](https://github.com/wazuh/wazuh-qa/pull/5081)) \- (Framework + Tests) - Update Packages in TestScanSyscollectorCases ([#4997](https://github.com/wazuh/wazuh-qa/pull/4997)) \- (Framework + Tests) - Reduced test_shutdown_message runtime ([#4986](https://github.com/wazuh/wazuh-qa/pull/4986)) \- (Tests) @@ -79,6 +75,15 @@ All notable changes to this project will be documented in this file. 
### Fixed +- Fix packages in Windows and macOS upgrade cases ([#5223](https://github.com/wazuh/wazuh-qa/pull/5223)) \- (Framework + Tests) +- Fix vulnerabilities and add new packages to Vulnerability Detector E2E tests ([#5234](https://github.com/wazuh/wazuh-qa/pull/5234)) \- (Tests) +- Fix provision macOS endpoints with npm ([#5158](https://github.com/wazuh/wazuh-qa/pull/5158)) \- (Tests) +- Fix timestamps alerts and logs filter ([#5157](https://github.com/wazuh/wazuh-qa/pull/5157)) \- (Framework + Tests) +- Fix macOS and Windows agents timezone ([#5178](https://github.com/wazuh/wazuh-qa/pull/5178)) \- (Framework) +- Fix Vulnerability Detector E2E tests by adding description to all tests ([#5151](https://github.com/wazuh/wazuh-qa/pull/5151)) \- (Tests) +- Fix parser for non package vulnerabilities ([#5146](https://github.com/wazuh/wazuh-qa/pull/5146)) \- (Framework) +- Fix remote_operations_handler functions to Vulnerability Detector E2E tests ([#5155](https://github.com/wazuh/wazuh-qa/pull/5155)) \- (Framework) +- Fix enrollment cluster system tests ([#5134](https://github.com/wazuh/wazuh-qa/pull/5134)) \- (Tests) +- Fix `test_synchronization` system test ([#5089](https://github.com/wazuh/wazuh-qa/pull/5089)) \- (Framework + Tests) +- Fix number of files and their size for `test_zip_size_limit` ([#5133](https://github.com/wazuh/wazuh-qa/pull/5133)) \- (Tests) +- Fix test_shutdown_message system test ([#5087](https://github.com/wazuh/wazuh-qa/pull/5087)) \- (Tests) @@ -106,6 +111,11 @@ All notable changes to this project will be documented in this file. - Fix test cluster performance. ([#4780](https://github.com/wazuh/wazuh-qa/pull/4780)) \- (Framework) - Fixed the graphic generation for the logcollectord statistics files. 
([#5021](https://github.com/wazuh/wazuh-qa/pull/5021)) \- (Framework) + +## [4.7.4] - 29/04/2024 + +- No changes + ## [4.7.3] - 04/03/2024 ### Changed @@ -716,4 +726,4 @@ Release report: https://github.com/wazuh/wazuh/issues/13321 - Avoid problematic race-condition on VD integration tests for Windows [#1047](https://github.com/wazuh/wazuh-qa/pull/1047) - QA Integration tests stabilization [#1002](https://github.com/wazuh/wazuh-qa/pull/1002) ### Deleted -- Deleted `behind_proxy_server` API config test. ([#1065](https://github.com/wazuh/wazuh-qa/pull/1065)) +- Deleted `behind_proxy_server` API config test. ([#1065](https://github.com/wazuh/wazuh-qa/pull/1065)) \ No newline at end of file diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py index 32a1a680e0..1661234336 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/indexer_api.py @@ -18,11 +18,78 @@ from wazuh_testing.tools.system import HostManager -STATE_INDEX_NAME = 'wazuh-vulnerabilities-states' +STATE_INDEX_NAME = 'wazuh-states-vulnerabilities' + + +def create_vulnerability_states_indexer_filter(target_agent: str | None = None, + greater_than_timestamp: str | None = None) -> dict: + """Create a filter for the Indexer API for the vulnerability state index. + + Args: + target_agent: The target agent to filter on. + greater_than_timestamp: The timestamp to filter on. + + Returns: + dict: A dictionary containing the filter. + """ + timestamp_filter = None + if greater_than_timestamp: + timestamp_filter = { + 'greater_than_timestamp': greater_than_timestamp, + 'timestamp_name': 'vulnerability.detected_at' + } + + return _create_filter(target_agent, timestamp_filter) + + +def create_alerts_filter(target_agent: str | None = None, greater_than_timestamp: str | None = None) -> dict: + """Create a filter for the Indexer API for the alerts index. 
+ + Args: + target_agent: The target agent to filter on. + greater_than_timestamp: The timestamp to filter on. + + Returns: + dict: A dictionary containing the filter. + """ + timestamp_filter = None + if greater_than_timestamp: + timestamp_filter = { + 'greater_than_timestamp': greater_than_timestamp, + 'timestamp_name': '@timestamp' + } + + return _create_filter(target_agent, timestamp_filter) + + +def _create_filter(target_agent: str | None = None, timestamp_filter: dict | None = None) -> dict: + """Create a filter for the Indexer API. + + Args: + target_agent: The target agent to filter on. + greater_than_timestamp: The timestamp to filter on. + timestamp_field: The timestamp field to filter on. + + Returns: + dict: A dictionary containing the filter. + """ + filter = { + 'bool': { + 'must': [] + } + } + if timestamp_filter: + timestamp_field = timestamp_filter['timestamp_name'] + greater_than_timestamp = timestamp_filter['greater_than_timestamp'] + filter['bool']['must'].append({'range': {timestamp_field: {'gte': greater_than_timestamp}}}) + if target_agent: + filter['bool']['must'].append({'match': {'agent.name': target_agent}}) + + return filter def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, - index: str = 'wazuh-alerts*', greater_than_timestamp=None, agent: str = '') -> Dict: + index: str = 'wazuh-alerts*', filter: dict | None = None, size: int = 10000) -> Dict: """ Get values from the Wazuh Indexer API. @@ -31,8 +98,8 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' credentials (Optional): A dictionary containing the Indexer credentials. Defaults to {'user': 'admin', 'password': 'changeme'}. index (Optional): The Indexer index name. Defaults to 'wazuh-alerts*'. - greater_than_timestamp (Optional): The timestamp to filter the results. Defaults to None. - agent (Optional): The agent name to filter the results. Defaults to ''. 
+ filter (Optional): A dictionary containing the query filter. Defaults to None. + size (Optional): The number of results to retrieve. Defaults to 10000. Returns: Dict: A dictionary containing the values retrieved from the Indexer API. @@ -40,52 +107,13 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' logging.info(f"Getting values from the Indexer API for index {index}") url = f"https://{host_manager.get_master_ip()}:9200/{index}/_search" - headers = { - 'Content-Type': 'application/json', - } - - data = { - "query": { - "match_all": {} - } - } - - if greater_than_timestamp and agent: - query = { - "bool": { - "must": [ - {"range": {"@timestamp": {"gte": f"{greater_than_timestamp}"}}}, - {"match": {"agent.name": f"{agent}"}} - ] - } - } - - data['query'] = query - elif greater_than_timestamp: - query = { - "bool": { - "must": [ - {"range": {"@timestamp": {"gte": f"{greater_than_timestamp}"}}} - ] - } - } - data['query'] = query - elif agent: - query = { - "bool": { - "must": [ - {"match": {"agent.name": f"{agent}"}} - ] - } - } + data = {} + param = {'size': size} + headers = {'Content-Type': 'application/json'} - data['query'] = query - - param = { - 'pretty': 'true', - 'size': 10000, - } + if filter: + data['query'] = filter response = requests.get(url=url, params=param, verify=False, auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password']), @@ -93,3 +121,25 @@ def get_indexer_values(host_manager: HostManager, credentials: dict = {'user': ' json=data) return response.json() + + +def delete_index(host_manager: HostManager, credentials: dict = {'user': 'admin', 'password': 'changeme'}, + index: str = 'wazuh-alerts*'): + """ + Delete index from the Wazuh Indexer API. + + Args: + host_manager: An instance of the HostManager class containing information about hosts. + credentials (Optional): A dictionary containing the Indexer credentials. Defaults to + {'user': 'admin', 'password': 'changeme'}. 
+ index (Optional): The Indexer index name. Defaults to 'wazuh-alerts*'. + """ + logging.info(f"Deleting {index} index") + + url = f"https://{host_manager.get_master_ip()}:9200/{index}/" + headers = { + 'Content-Type': 'application/json', + } + + requests.delete(url=url, verify=False, + auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password']), headers=headers) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py index 6387efa55e..7fa59317c3 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/logs.py @@ -52,6 +52,9 @@ def get_hosts_logs(host_manager: HostManager, host_group: str = 'all') -> Dict[s - host_manager (HostManager): An instance of the HostManager class for managing remote hosts. - host_group (str, optional): The name of the host group where the files will be truncated. Default is 'all'. + + Returns: + - host_logs (Dict[str, str]): Dictionary containing the logs from the ossec.log file of each host """ host_logs = {} for host in host_manager.get_group_hosts(host_group): @@ -59,3 +62,19 @@ def get_hosts_logs(host_manager: HostManager, host_group: str = 'all') -> Dict[s host_logs[host] = host_manager.get_file_content(host, logs_filepath_os[host_os_name]) return host_logs + +def get_hosts_alerts(host_manager: HostManager) -> Dict[str, str]: + """ + Get the alerts in the alert.json file from the specified host group. + + Parameters: + - host_manager (HostManager): An instance of the HostManager class for managing remote hosts. 
+ + Returns: + - host_alerts (Dict[str, str]): Dictionary containing the alerts from the alert.json file of each manager + """ + host_alerts = {} + for host in host_manager.get_group_hosts("manager"): + host_alerts[host] = host_manager.get_file_content(host, ALERTS_JSON_PATH) + + return host_alerts diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py index cd1c4136d0..ce3300f3a4 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/regex.py @@ -46,10 +46,10 @@ 'parameters': ['HOST_NAME', 'CVE', 'PACKAGE_NAME', 'PACKAGE_VERSION', 'ARCHITECTURE'] }, 'vuln_affected': { - 'regex': 'CVE.*? affects.*"?' + 'regex': 'CVE.* affects.*"?' }, 'vuln_mitigated': { - 'regex': "The .* that affected .* was solved due to a package removal" + 'regex': "The .* that affected .* was solved due to a package removal.*" } } diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py index fb2198a9c1..3477aa75c7 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/remote_operations_handler.py @@ -22,14 +22,15 @@ """ import logging from typing import Dict, List -from datetime import datetime +from datetime import datetime, timezone from concurrent.futures import ThreadPoolExecutor from wazuh_testing.end_to_end.waiters import wait_syscollector_and_vuln_scan from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.vulnerability_detector import check_vuln_alert_indexer, check_vuln_state_index, \ load_packages_metadata, parse_vulnerability_detector_alerts -from wazuh_testing.end_to_end.indexer_api import get_indexer_values +from wazuh_testing.end_to_end.indexer_api import get_indexer_values, \ + create_vulnerability_states_indexer_filter, create_alerts_filter def 
check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime: str, host_manager: HostManager, @@ -43,12 +44,12 @@ def check_vulnerability_alerts(results: Dict, check_data: Dict, current_datetime vulnerability_index = {} for agent in host_manager.get_group_hosts('agent'): - agent_all_alerts = parse_vulnerability_detector_alerts(get_indexer_values(host_manager, - greater_than_timestamp=current_datetime, - agent=agent)['hits']['hits']) + alerts_filter = create_alerts_filter(agent, current_datetime) + index_vuln_filter = create_vulnerability_states_indexer_filter(agent, current_datetime) - agent_all_vulnerabilities = get_indexer_values(host_manager, greater_than_timestamp=current_datetime, - agent=agent, + agent_all_alerts = parse_vulnerability_detector_alerts(get_indexer_values(host_manager, + filter=alerts_filter)['hits']['hits']) + agent_all_vulnerabilities = get_indexer_values(host_manager, filter=index_vuln_filter, index='wazuh-states-vulnerabilities')['hits']['hits'] vulnerability_alerts[agent] = agent_all_alerts['affected'] @@ -164,37 +165,47 @@ def install_package(host: str, operation_data: Dict[str, Dict], host_manager: Ho package_id = None if host_os_name in install_package_data: - if host_os_arch in install_package_data[host_os_name]: - package_id = install_package_data[host_os_name][host_os_arch] - else: - raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + try: + if host_os_arch in install_package_data[host_os_name]: + package_id = install_package_data[host_os_name][host_os_arch] + + package_data = load_packages_metadata()[package_id] + package_url = package_data['urls'][host_os_name][host_os_arch] - package_data = load_packages_metadata()[package_id] - package_url = package_data['urls'][host_os_name][host_os_arch] + logging.info(f"Installing package on {host}") + logging.info(f"Package URL: {package_url}") - logging.info(f"Installing package on {host}") - logging.info(f"Package URL: {package_url}") + 
current_datetime = datetime.now(timezone.utc).isoformat()[:-6] # Delete timezone offset + use_npm = package_data.get('use_npm', False) - current_datetime = datetime.utcnow().isoformat() + if use_npm: + host_manager.install_npm_package(host, package_url, system) + else: + host_manager.install_package(host, package_url, system) - host_manager.install_package(host, package_url, system) + logging.info(f"Package {package_url} installed on {host}") - logging.info(f"Package {package_url} installed on {host}") + logging.info(f"Package installed on {host}") - logging.info(f"Package installed on {host}") + results['checks']['all_successfull'] = True - results['checks']['all_successfull'] = True + wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or + operation_data['check']['state_index'] or + operation_data['check']['no_alerts'] or + operation_data['check']['no_indices']) - wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or - operation_data['check']['state_index'] or - operation_data['check']['no_alerts'] or - operation_data['check']['no_indices']) + if wait_is_required: + wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) - if wait_is_required: - wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) + check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, + package_data, operation='install') + + else: + logging.error(f"Error: Package for {host_os_name} and {host_os_arch} not found") + + except Exception as e: + logging.critical(f"Error searching package: {e}") - check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, - package_data, operation='install') else: logging.info(f"No operation to perform on {host}") @@ -239,33 +250,42 @@ def remove_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos package_id = None if host_os_name in 
package_data: - if host_os_arch in package_data[host_os_name]: - package_id = package_data[host_os_name][host_os_arch] - else: - raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + try: + if host_os_arch in package_data[host_os_name]: + package_id = package_data[host_os_name][host_os_arch] + + package_data = load_packages_metadata()[package_id] + use_npm = package_data.get('use_npm', False) - package_data = load_packages_metadata()[package_id] + current_datetime = datetime.now(timezone.utc).isoformat()[:-6] # Delete timezone offset - current_datetime = datetime.utcnow().isoformat() + logging.info(f"Removing package on {host}") + if 'uninstall_name' in package_data: + uninstall_name = package_data['uninstall_name'] + if use_npm: + host_manager.remove_npm_package(host, system, package_uninstall_name=uninstall_name) + else: + host_manager.remove_package(host, system, package_uninstall_name=uninstall_name) + elif 'uninstall_custom_playbook' in package_data: + host_manager.remove_package(host, system, + custom_uninstall_playbook=package_data['uninstall_custom_playbook']) - logging.info(f"Removing package on {host}") - if 'uninstall_name' in package_data: - uninstall_name = package_data['uninstall_name'] - host_manager.remove_package(host, system, package_uninstall_name=uninstall_name) - elif 'uninstall_custom_playbook' in package_data: - host_manager.remove_package(host, system, - custom_uninstall_playbook=package_data['uninstall_custom_playbook']) + wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or + operation_data['check']['state_index'] or + operation_data['check']['no_alerts'] or + operation_data['check']['no_indices']) - wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or - operation_data['check']['state_index'] or - operation_data['check']['no_alerts'] or - operation_data['check']['no_indices']) + if wait_is_required: + wait_syscollector_and_vuln_scan(host_manager, 
host, operation_data, current_datetime) - if wait_is_required: - wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) + check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, + package_data, operation='remove') - check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, - package_data, operation='remove') + else: + logging.error(f"Error: Package for {host_os_name} and {host_os_arch} not found") + + except Exception as e: + logging.critical(f"Error searching package: {e}") else: logging.info(f"No operation to perform on {host}") @@ -316,41 +336,55 @@ def update_package(host: str, operation_data: Dict[str, Dict], host_manager: Hos package_id_to = None if host_os_name in install_package_data_from: - if host_os_arch in install_package_data_from[host_os_name]: - package_id_from = install_package_data_from[host_os_name][host_os_arch] - else: - raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + try: + if host_os_arch in install_package_data_from[host_os_name]: + package_id_from = install_package_data_from[host_os_name][host_os_arch] + else: + logging.error(f"Error: Package for {host_os_name} and {host_os_arch} not found") + except Exception as e: + logging.critical(f"Error searching package: {e}") if host_os_name in install_package_data_to: - if host_os_arch in install_package_data_to[host_os_name]: - package_id_to = install_package_data_to[host_os_name][host_os_arch] - else: - raise ValueError(f"Package for {host_os_name} and {host_os_arch} not found") + try: + if host_os_arch in install_package_data_to[host_os_name]: + package_id_to = install_package_data_to[host_os_name][host_os_arch] + + package_data_from = load_packages_metadata()[package_id_from] + package_data_to = load_packages_metadata()[package_id_to] + + package_url_to = package_data_to['urls'][host_os_name][host_os_arch] + + logging.info(f"Installing package on 
{host}") + logging.info(f"Package URL: {package_url_to}") + + current_datetime = datetime.now(timezone.utc).isoformat()[:-6] # Delete timezone offset + use_npm = package_data_to.get('use_npm', False) - package_data_from = load_packages_metadata()[package_id_from] - package_data_to = load_packages_metadata()[package_id_to] + if use_npm: + host_manager.install_npm_package(host, package_url_to, system) + else: + host_manager.install_package(host, package_url_to, system) - package_url_to = package_data_to['urls'][host_os_name][host_os_arch] + logging.info(f"Package {package_url_to} installed on {host}") - logging.info(f"Installing package on {host}") - logging.info(f"Package URL: {package_url_to}") + logging.info(f"Package installed on {host}") - current_datetime = datetime.utcnow().isoformat() - host_manager.install_package(host, package_url_to, system) + wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or + operation_data['check']['state_index'] or + operation_data['check']['no_alerts'] or + operation_data['check']['no_indices']) + if wait_is_required: + wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) - logging.info(f"Package {package_url_to} installed on {host}") + check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, + {'from': package_data_from, 'to': package_data_to}, operation='update') - logging.info(f"Package installed on {host}") + else: + logging.error(f"Error: Package for {host_os_name} and {host_os_arch} not found") - wait_is_required = 'check' in operation_data and (operation_data['check']['alerts'] or - operation_data['check']['state_index'] or - operation_data['check']['no_alerts'] or - operation_data['check']['no_indices']) - if wait_is_required: - wait_syscollector_and_vuln_scan(host_manager, host, operation_data, current_datetime) + except Exception as e: + logging.critical(f"Error searching package: {e}") - 
check_vulnerability_alerts(results, operation_data['check'], current_datetime, host_manager, host, - {'from': package_data_from, 'to': package_data_to}, operation='update') else: logging.info(f"No operation to perform on {host}") @@ -369,7 +403,7 @@ def launch_remote_sequential_operation_on_agent(agent: str, task_list: List[Dict host_manager (HostManager): An instance of the HostManager class containing information about hosts. """ # Convert datetime to Unix timestamp (integer) - timestamp = datetime.utcnow().isoformat() + timestamp = datetime.now(timezone.utc).isoformat()[:-6] # Delete timezone offset if task_list: for task in task_list: diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py index 675a0c9bf6..98f36a79d5 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector.py @@ -23,7 +23,7 @@ from typing import Dict, List from wazuh_testing.tools.system import HostManager -from wazuh_testing.end_to_end.indexer_api import get_indexer_values +from wazuh_testing.end_to_end.indexer_api import get_indexer_values, create_vulnerability_states_indexer_filter from wazuh_testing.end_to_end.regex import REGEX_PATTERNS from collections import namedtuple @@ -78,8 +78,10 @@ def check_vuln_state_index(host_manager: HostManager, host: str, package: Dict[s package (dict): Dictionary containing package data. current_datetime (str): Datetime to filter the vulnerability state index. 
""" - index_vuln_state_content = get_indexer_values(host_manager, index='wazuh-states-vulnerabilities', - greater_than_timestamp=current_datetime)['hits']['hits'] + filter = create_vulnerability_states_indexer_filter(host, current_datetime) + index_vuln_state_content = get_indexer_values(host_manager, + index='wazuh-states-vulnerabilities', + filter=filter)['hits']['hits'] expected_alerts_not_found = [] logging.info(f"Checking vulnerability state index {package}") @@ -259,8 +261,8 @@ def check_vuln_state_consistency(vulnerabilities_alerts, vulnerabilities_states) if vulnerabilities_states.keys() != vulnerabilities_alerts.keys(): logging.critical("The number of agents is not the same between alerts and states") - agents_in_alerts_states = [agent for agent in vulnerabilities_states.keys() \ - if agent in vulnerabilities_alerts.keys()] + agents_in_alerts_states = [agent for agent in vulnerabilities_states.keys() + if agent in vulnerabilities_alerts.keys()] alerts_not_in_states = [] states_not_in_alerts = [] @@ -305,14 +307,23 @@ def get_vulnerabilities_from_states(vulnerabilities_states: List) -> List: try: vulnerability = Vulnerability( cve=state_vulnerability['_source']['vulnerability']['id'], - package_name=state_vulnerability['_source']['package']['name'], - package_version=state_vulnerability['_source']['package']['version'], - type=state_vulnerability['_source']['pacakge']['type'] if 'type' in state_vulnerability['_source']['vulnerability'] else None, - architecture=state_vulnerability['_source']['package']['architecture'] if 'architecture' in state_vulnerability['_source']['vulnerability'] else None + package_name=(state_vulnerability['_source']['package']['name'] + if 'package' in state_vulnerability['_source'] + and 'name' in state_vulnerability['_source']['package'] else None), + package_version=(state_vulnerability['_source']['package']['version'] + if 'package' in state_vulnerability['_source'] + and 'version' in state_vulnerability['_source']['package'] 
else None), + type=(state_vulnerability['_source']['package']['type'] + if 'package' in state_vulnerability['_source'] + and 'type' in state_vulnerability['_source']['package'] else None), + architecture=(state_vulnerability['_source']['package']['architecture'] + if 'package' in state_vulnerability['_source'] + and 'architecture' in state_vulnerability['_source']['package'] else None) ) vulnerabilities.append(vulnerability) except KeyError: logging.error(f"Error parsing vulnerability: {state_vulnerability}") + raise KeyError vulnerabilities = sorted(vulnerabilities, key=lambda x: (x.cve, x.package_name, x.package_version, x.architecture)) diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json index 9f5912d2c3..c648a1a04c 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/vulnerability_detector_packages/vuln_packages.json @@ -35,13 +35,39 @@ "CVE-2022-23498" ], "urls": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.5-1.aarch64.rpm" - }, "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.5_arm64.deb" + "amd64": "https://dl.grafana.com/oss/release/grafana_8.5.5_amd64.deb", + "arm64v8": "https://dl.grafana.com/oss/release/grafana_8.5.5_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-8.5.5-1": { + "package_name": "grafana", + "package_version": "8.5.5-1", + "CVE": [ + "CVE-2023-2183", + "CVE-2023-1410", + "CVE-2023-0594", + "CVE-2023-0507", + "CVE-2022-39324", + "CVE-2022-39307", + "CVE-2022-39306", + "CVE-2022-39229", + 
"CVE-2022-39201", + "CVE-2022-36062", + "CVE-2022-35957", + "CVE-2022-31130", + "CVE-2022-31123", + "CVE-2022-31107", + "CVE-2022-31097", + "CVE-2022-23552", + "CVE-2022-23498" + ], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/oss/release/grafana-8.5.5-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/oss/release/grafana-8.5.5-1.aarch64.rpm" } }, "uninstall_name": "grafana*" @@ -69,13 +95,39 @@ "CVE-2022-23498" ], "urls": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-8.5.6-1.aarch64.rpm" - }, "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_8.5.6_arm64.deb" + "amd64": "https://dl.grafana.com/oss/release/grafana_8.5.6_amd64.deb", + "arm64v8": "https://dl.grafana.com/oss/release/grafana_8.5.6_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-8.5.6-1": { + "package_name": "grafana", + "package_version": "8.5.6-1", + "CVE": [ + "CVE-2023-2183", + "CVE-2023-1410", + "CVE-2023-0594", + "CVE-2023-0507", + "CVE-2022-39324", + "CVE-2022-39307", + "CVE-2022-39306", + "CVE-2022-39229", + "CVE-2022-39201", + "CVE-2022-36062", + "CVE-2022-35957", + "CVE-2022-31130", + "CVE-2022-31123", + "CVE-2022-31107", + "CVE-2022-31097", + "CVE-2022-23552", + "CVE-2022-23498" + ], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/oss/release/grafana-8.5.6-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/oss/release/grafana-8.5.6-1.aarch64.rpm" } }, "uninstall_name": "grafana*" @@ -99,13 +151,35 @@ "CVE-2022-23498" ], "urls": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.1.1-1.aarch64.rpm" - }, "ubuntu": { - "amd64": 
"https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.1.1_arm64.deb" + "amd64": "https://dl.grafana.com/oss/release/grafana_9.1.1_amd64.deb", + "arm64v8": "https://dl.grafana.com/oss/release/grafana_9.1.1_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-9.1.1-1": { + "package_name": "grafana", + "package_version": "9.1.1-1", + "CVE": [ + "CVE-2023-2183", + "CVE-2023-1387", + "CVE-2022-39324", + "CVE-2022-39307", + "CVE-2022-39306", + "CVE-2022-39229", + "CVE-2022-39201", + "CVE-2022-36062", + "CVE-2022-35957", + "CVE-2022-31130", + "CVE-2022-31123", + "CVE-2022-23552", + "CVE-2022-23498" + ], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/oss/release/grafana-9.1.1-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/oss/release/grafana-9.1.1-1.aarch64.rpm" } }, "uninstall_name": "grafana*" @@ -114,71 +188,128 @@ "package_name": "grafana", "package_version": "9.2.0", "CVE": [ - "CVE-2021-25804", - "CVE-2021-25803", - "CVE-2021-25802", - "CVE-2021-25801", - "CVE-2020-26664" + "CVE-2023-3128", + "CVE-2023-22462", + "CVE-2023-2183", + "CVE-2023-1410", + "CVE-2023-1387", + "CVE-2023-0594", + "CVE-2023-0507", + "CVE-2022-39328", + "CVE-2022-39324", + "CVE-2022-39307", + "CVE-2022-39306", + "CVE-2022-23552", + "CVE-2022-23498" ], "urls": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.2.0-1.aarch64.rpm" - }, "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.2.0_arm64.deb" - }, - "uninstall_name": "grafana*" - } + "amd64": "https://dl.grafana.com/oss/release/grafana_9.2.0_amd64.deb", + "arm64v8": "https://dl.grafana.com/oss/release/grafana_9.2.0_arm64.deb" + } + }, + 
"uninstall_name": "grafana*" + }, + "grafana-9.2.0-1": { + "package_name": "grafana", + "package_version": "9.2.0-1", + "CVE": [ + "CVE-2023-3128", + "CVE-2023-22462", + "CVE-2023-2183", + "CVE-2023-1410", + "CVE-2023-1387", + "CVE-2023-0594", + "CVE-2023-0507", + "CVE-2022-39328", + "CVE-2022-39324", + "CVE-2022-39307", + "CVE-2022-39306", + "CVE-2022-23552", + "CVE-2022-23498" + ], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/oss/release/grafana-9.2.0-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/oss/release/grafana-9.2.0-1.aarch64.rpm" + } + }, + "uninstall_name": "grafana*" }, "grafana-9.4.17": { "package_name": "grafana", "package_version": "9.4.17", "CVE": [], "urls": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.4.17-1.aarch64.rpm" - }, "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.4.17_arm64.deb" + "amd64": "https://dl.grafana.com/oss/release/grafana_9.4.17_amd64.deb", + "arm64v8": "https://dl.grafana.com/oss/release/grafana_9.4.17_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-9.4.17-1": { + "package_name": "grafana", + "package_version": "9.4.17-1", + "CVE": [], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/oss/release/grafana-9.4.17-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/oss/release/grafana-9.4.17-1.aarch64.rpm" } }, "uninstall_name": "grafana*" }, "grafana-9.5.13": { - "package_name": "grafana-enterprise", + "package_name": "grafana", "package_version": "9.5.13", "CVE": [], "urls": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-9.5.13-1.aarch64.rpm" - }, "ubuntu": { - "amd64": 
"https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_9.5.13_arm64.deb" + "amd64": "https://dl.grafana.com/oss/release/grafana_9.5.13_amd64.deb", + "arm64v8": "https://dl.grafana.com/oss/release/grafana_9.5.13_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-9.5.13-1": { + "package_name": "grafana", + "package_version": "9.5.13-1", + "CVE": [], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/oss/release/grafana-9.5.13-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/oss/release/grafana-9.5.13-1.aarch64.rpm" } }, "uninstall_name": "grafana*" }, "grafana-10.0.0": { - "package_name": "grafana-enterprise", + "package_name": "grafana", "package_version": "10.0.0", "CVE": [ "CVE-2023-4822", "CVE-2023-4399" ], "urls": { - "centos": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.x86_64.rpm", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise-10.0.0-1.aarch64.rpm" - }, "ubuntu": { - "amd64": "https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_amd64.deb", - "arm64v8": "https://dl.grafana.com/enterprise/release/grafana-enterprise_10.0.0_arm64.deb" + "amd64": "https://dl.grafana.com/oss/release/grafana_10.0.0_amd64.deb", + "arm64v8": "https://dl.grafana.com/oss/release/grafana_10.0.0_arm64.deb" + } + }, + "uninstall_name": "grafana*" + }, + "grafana-10.0.0-1": { + "package_name": "grafana", + "package_version": "10.0.0-1", + "CVE": [ + "CVE-2023-4822", + "CVE-2023-4399" + ], + "urls": { + "centos": { + "amd64": "https://dl.grafana.com/oss/release/grafana-10.0.0-1.x86_64.rpm", + "arm64v8": "https://dl.grafana.com/oss/release/grafana-10.0.0-1.aarch64.rpm" } }, "uninstall_name": "grafana*" @@ -296,11 +427,7 @@ "CVE-2021-4044" ], "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg", - "arm64v8": 
"https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg" - }, - "windows": { + "windows": { "amd64": "https://nodejs.org/dist/v17.0.1/node-v17.0.1-x64.msi" } }, @@ -318,11 +445,7 @@ "CVE-2021-4044" ], "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg", - "arm64v8": "https://nodejs.org/dist/v17.1.0/node-v17.1.0.pkg" - }, - "windows": { + "windows": { "amd64": "https://nodejs.org/dist/v17.1.0/node-v17.1.0-x64.msi" } }, @@ -332,6 +455,8 @@ "package_name": "node", "package_version": "18.0.0", "CVE": [ + "CVE-2023-44487", + "CVE-2023-23936", "CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", @@ -357,11 +482,7 @@ "CVE-2022-3602" ], "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg", - "arm64v8": "https://nodejs.org/dist/v18.0.0/node-v18.0.0.pkg" - }, - "windows": { + "windows": { "amd64": "https://nodejs.org/dist/v18.0.0/node-v18.0.0-x64.msi" } }, @@ -371,6 +492,9 @@ "package_name": "node", "package_version": "18.1.0", "CVE": [ + "CVE-2023-44487", + "CVE-2023-23936", + "CVE-2023-30589", "CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", @@ -394,132 +518,60 @@ "CVE-2022-3602" ], "urls": { - "windows": { + "windows": { "amd64": "https://nodejs.org/dist/v18.1.0/node-v18.1.0-x64.msi" } }, "uninstall_name": "node*" }, - "node-v18.11.0": { - "package_name": "node", - "package_version": "18.11.0", - "CVE": [ - "CVE-2023-38552", - "CVE-2023-32559", - "CVE-2023-32006", - "CVE-2023-32002", - "CVE-2023-30590", - "CVE-2023-30588", - "CVE-2023-30585", - "CVE-2023-30581", - "CVE-2023-23920", - "CVE-2023-23919", - "CVE-2023-23918", - "CVE-2022-32222" - ], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v18.11.0/node-v18.11.0.pkg", - "arm64v8": "https://nodejs.org/dist/v18.11.0/node-v18.11.0.pkg" - } - }, - "uninstall_name": "node*" - }, - "node-v18.12.0": { - "package_name": "node", - "package_version": "18.12.0", - "CVE": [ - "CVE-2023-44487", - "CVE-2023-38552", - "CVE-2023-32002", - "CVE-2023-30590", - 
"CVE-2023-30588", - "CVE-2023-30585", - "CVE-2023-23936", - "CVE-2023-23920", - "CVE-2023-23919", - "CVE-2023-23918", - "CVE-2022-43548", - "CVE-2022-3786", - "CVE-2022-3602" - ], - "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v18.12.0/node-v18.12.0.pkg", - "arm64v8": "https://nodejs.org/dist/v18.12.0/node-v18.12.0.pkg" - } - }, - "uninstall_name": "node*" - }, - "node-v19.5.0": { + "node-v18.20.0": { "package_name": "node", - "package_version": "19.5.0", + "package_version": "18.20.0", "CVE": [], "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg", - "arm64v8": "https://nodejs.org/dist/v19.5.0/node-v19.5.0.pkg" - }, - "windows": { - "amd64": "https://nodejs.org/dist/v19.5.0/win-x86/node.exe" + "windows": { + "amd64": "https://nodejs.org/dist/v18.20.0/node-v18.20.0-x64.msi" } }, "uninstall_name": "node*" }, - "node-v19.6.0": { + "node-v18.20.2": { "package_name": "node", - "package_version": "19.6.0", + "package_version": "18.20.2", "CVE": [], "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg", - "arm64v8": "https://nodejs.org/dist/v19.6.0/node-v19.6.0.pkg" - }, "windows": { - "amd64": "https://nodejs.org/dist/v19.6.0/win-x86/node.exe" + "amd64": "https://nodejs.org/dist/v18.20.2/node-v18.20.2-x64.msi" } }, "uninstall_name": "node*" }, - "node-v20.0.0": { + "node-v19.5.0": { "package_name": "node", - "package_version": "20.0.0", + "package_version": "19.5.0", "CVE": [ - "CVE-2022-21824" + "CVE-2023-23936", + "CVE-2023-23920", + "CVE-2023-23918" ], "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v20.0.0/node-v20.0.0.pkg", - "arm64v8": "https://nodejs.org/dist/v20.0.0/node-v20.0.0.pkg" + "windows": { + "amd64": "https://nodejs.org/dist/v19.5.0/node-v19.5.0-x64.msi" } }, "uninstall_name": "node*" }, - "node-v20.1.0": { + "node-v19.6.0": { "package_name": "node", - "package_version": "20.1.0", + "package_version": "19.6.0", "CVE": [ - "CVE-2023-44487", - "CVE-2023-39332", - 
"CVE-2023-39331", - "CVE-2023-38552", - "CVE-2023-32559", - "CVE-2023-32558", - "CVE-2023-32006", - "CVE-2023-32005", - "CVE-2023-32004", - "CVE-2023-32003", - "CVE-2023-32002", - "CVE-2023-30590", - "CVE-2023-30588", - "CVE-2023-30586", - "CVE-2023-30585", - "CVE-2023-30581" + "CVE-2023-23936", + "CVE-2023-23920", + "CVE-2023-23918" ], "urls": { - "macos": { - "amd64": "https://nodejs.org/dist/v20.1.0/node-v20.1.0.pkg", - "arm64v8": "https://nodejs.org/dist/v20.1.0/node-v20.1.0.pkg" + "windows": { + "amd64": "https://nodejs.org/dist/v19.6.0/node-v19.6.0-x64.msi" } }, "uninstall_name": "node*" @@ -626,9 +678,9 @@ "CVE-2023-22007", "CVE-2023-22028", "CVE-2021-2356", - "CVE-2022-21417", - "CVE-2022-21444", - "CVE-2023-21980", + "CVE-2022-21417", + "CVE-2022-21444", + "CVE-2023-21980", "CVE-2023-21977" ], "urls": { @@ -763,5 +815,125 @@ } }, "uninstall_name": "*openjdk*" + }, + "http-proxy-0.5.9": { + "package_name": "http-proxy", + "package_version": "0.5.9", + "CVE": [ + "CVE-2017-16014" + ], + "urls": { + "macos": { + "amd64": "http-proxy@0.5.9", + "arm64v8": "http-proxy@0.5.9" + } + }, + "uninstall_name": "http-proxy", + "use_npm": true + }, + "http-proxy-0.5.10": { + "package_name": "http-proxy", + "package_version": "0.5.10", + "CVE": [ + "CVE-2017-16014" + ], + "urls": { + "macos": { + "amd64": "http-proxy@0.5.10", + "arm64v8": "http-proxy@0.5.10" + } + }, + "uninstall_name": "http-proxy", + "use_npm": true + }, + "systeminformation-4.34.23": { + "package_name": "systeminformation", + "package_version": "4.34.23", + "CVE": [ + "CVE-2021-21388", + "CVE-2021-21315" + ], + "urls": { + "macos": { + "amd64": "systeminformation@4.34.23", + "arm64v8": "systeminformation@4.34.23" + } + }, + "uninstall_name": "systeminformation", + "use_npm": true + }, + "systeminformation-5.0.0": { + "package_name": "systeminformation", + "package_version": "5.0.0", + "CVE": [ + "CVE-2021-21388", + "CVE-2021-21315", + "CVE-2023-42810" + ], + "urls": { + "macos": { + "amd64": 
"systeminformation@5.0.0", + "arm64v8": "systeminformation@5.0.0" + } + }, + "uninstall_name": "systeminformation", + "use_npm": true + }, + "http-proxy-0.7.0": { + "package_name": "http-proxy", + "package_version": "0.7.0", + "CVE": [ + ], + "urls": { + "macos": { + "amd64": "http-proxy@0.7.0", + "arm64v8": "http-proxy@0.7.0" + } + }, + "uninstall_name": "http-proxy", + "use_npm": true + }, + "http-proxy-0.7.2": { + "package_name": "http-proxy", + "package_version": "0.7.2", + "CVE": [ + ], + "urls": { + "macos": { + "amd64": "http-proxy@0.7.2", + "arm64v8": "http-proxy@0.7.2" + } + }, + "uninstall_name": "http-proxy", + "use_npm": true + }, + "luxon-2.5.2": { + "package_name": "luxon", + "package_version": "2.5.2", + "CVE": [ + ], + "urls": { + "macos": { + "amd64": "luxon@2.5.2", + "arm64v8": "luxon@2.5.2" + } + }, + "uninstall_name": "luxon", + "use_npm": true + }, + "luxon-3.0.0": { + "package_name": "luxon", + "package_version": "3.0.0", + "CVE": [ + "CVE-2022-31129" + ], + "urls": { + "macos": { + "amd64": "luxon@3.0.0", + "arm64v8": "luxon@3.0.0" + } + }, + "uninstall_name": "luxon", + "use_npm": true } } diff --git a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py index 8d2be19bc4..37d742a92f 100644 --- a/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py +++ b/deps/wazuh_testing/wazuh_testing/end_to_end/waiters.py @@ -43,9 +43,9 @@ def wait_until_vd_is_updated(host_manager: HostManager) -> None: host_manager (HostManager): Host manager instance to handle the environment. 
""" - monitoring_data = generate_monitoring_logs(host_manager, ["INFO: Action for 'vulnerability_feed_manager' finished"], + monitoring_data = generate_monitoring_logs(host_manager, ["INFO: Vulnerability scanner module started"], [VD_FEED_UPDATE_TIMEOUT], host_manager.get_group_hosts('manager')) - monitoring_events_multihost(host_manager, monitoring_data) + monitoring_events_multihost(host_manager, monitoring_data, ignore_timeout_error=False) def wait_until_vuln_scan_agents_finished(host_manager: HostManager) -> None: @@ -80,11 +80,12 @@ def wait_syscollector_and_vuln_scan(host_manager: HostManager, host: str, opera [get_event_regex({'event': 'syscollector_scan_start'}), get_event_regex({'event': 'syscollector_scan_end'})], [timeout_syscollector_scan, timeout_syscollector_scan], - host_manager.get_group_hosts('agent')) + host_manager.get_group_hosts('agent'), + greater_than_timestamp=current_datetime) truncate_remote_host_group_files(host_manager, host_manager.get_group_hosts('agent')) - monitoring_events_multihost(host_manager, monitoring_data) + monitoring_events_multihost(host_manager, monitoring_data, ignore_timeout_error=False) logging.info(f"Waiting for vulnerability scan to finish on {host}") diff --git a/deps/wazuh_testing/wazuh_testing/qa_docs/schema.yaml b/deps/wazuh_testing/wazuh_testing/qa_docs/schema.yaml index f66e7ec3b0..946a6fe8d9 100644 --- a/deps/wazuh_testing/wazuh_testing/qa_docs/schema.yaml +++ b/deps/wazuh_testing/wazuh_testing/qa_docs/schema.yaml @@ -199,9 +199,8 @@ predefined_values: - 4.7.1 - 4.7.2 - 4.7.3 + - 4.7.4 - 4.8.0 - - 4.8.1 - - 4.8.2 - 4.9.0 - 5.0.0 tags: diff --git a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py index c1384ca57d..7d5f194991 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py +++ b/deps/wazuh_testing/wazuh_testing/tools/agent_simulator.py @@ -540,7 +540,11 @@ def process_command(self, sender, message_list): 
sender.send_event(self.create_event(f'#!-req {req_code} ' f'{{"error":0, "message":"ok", "data":[]}} ')) elif command == 'getconfig': - response_json = '{"client":{"config-profile":"centos8","notify_time":10,"time-reconnect":60}}' + if "active-response" in message_list: + response_json = '{"active-response":{"disabled":"no"}}' + else: + response_json = '{"client":{"config-profile":"centos8","notify_time":10,"time-reconnect":60}}' + sender.send_event(self.create_event(f'#!-req {req_code} ok {response_json}')) elif command == 'getstate': response_json = '{"error":0,"data":{"global":{"start":"2021-02-26, 06:41:26","end":"2021-02-26 08:49:19"}}}' diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/binary.py b/deps/wazuh_testing/wazuh_testing/tools/performance/binary.py index 064ba11127..eb02eef076 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/binary.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/binary.py @@ -78,6 +78,10 @@ def get_process_pids(cls, process_name, check_children=True) -> list: if any(filter(lambda x: f'{process_name}.py' in x, proc.cmdline())): pid = proc.pid break + elif process_name == 'wazuh-indexer': + if any(filter(lambda x: f'{process_name}' in x, proc.cmdline())): + pid = proc.pid + break elif process_name in proc.name(): pid = proc.pid break diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py index 51a57fec16..1803941c7e 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic.py @@ -23,7 +23,7 @@ class StatisticMonitor: """This class generates a Python object to monitor the statistics file generated by Wazuh. It also recovers data - using Wazuh's API for the analysisd, remoted and wazuh-db daemons. + using Wazuh's API for the analysisd, remoted and wazuh-db daemons and for alerts and vulnerabilities indexes. 
There are four files: wazuh-analysisd.state, wazuh-remoted.state, wazuh-agentd.state and wazuh-logcollector.state and each one of them has unique characteristics and data. This class will parse the file, extract the data to a @@ -82,6 +82,12 @@ def __init__(self, target='agent', time_step=5, dst_dir=gettempdir(), use_state_ self.daemon = 'wazuh-db' self.use_state_file = False logger.warning("Wazuhdb stat monitoring from State File is not supported. Will get data from API.") + elif self.target == "vulnerabilities": + self.use_state_file = False + logger.warning("Vulnerabilities index monitoring from State File is not supported. Will get data from API.") + elif self.target == "alerts": + self.use_state_file = False + logger.warning("Alerts monitoring from State File is not supported. Will get data from API.") else: raise ValueError(f'The target {self.target} is not a valid one.') @@ -145,10 +151,12 @@ def _parse_state_file(self): def _parse_api_data(self): - """Read the data from the statistics file generated by Wazuh API.""" + """Read the data generated by Wazuh API.""" API_URL = f"https://{self.ip}:{self.port}" DAEMONS_ENDPOINT= f"/manager/daemons/stats?daemons_list={self.daemon}&wait_for_complete=true" + VULNS_ENDOPOINT= f"/wazuh-states-vulnerabilities/_count" + ALERTS_ENDPOINT= f"/wazuh-alerts-4.x-*/_count" TOKEN_ENDPOINT="/security/user/authenticate" logging.info("Getting statistics data from API for {}".format(self.target)) @@ -156,35 +164,64 @@ def _parse_api_data(self): max_retries = 3 token_response = None daemon_response = None - # Try to get the response token three times - for _ in range(max_retries): - try: - token_response = requests.get(API_URL + TOKEN_ENDPOINT, verify=False, - auth=requests.auth.HTTPBasicAuth("wazuh", "wazuh")) - if token_response.status_code == 200: - break - except requests.exceptions.RequestException as e: - logging.error(f"Error getting token from API: {str(e)}") - else: - logging.error("Retrying get API data, status code 
{}".format(token_response.status_code)) - - for _ in range(max_retries): - try: - daemons_response = requests.get(API_URL + DAEMONS_ENDPOINT, verify=False, - headers={'Authorization': 'Bearer ' + token_response.json()['data']['token']}) - if daemons_response.status_code == 200: - break - except requests.exceptions.RequestException as e: - logging.error(f"Error fetching {self.daemon} datafrom API: {str(e)}") + data = None + + if(self.target == "vulnerabilities"): + for _ in range(max_retries): + try: + response = requests.get(API_URL + VULNS_ENDOPOINT, verify=False, + auth=requests.auth.HTTPBasicAuth("admin", "admin")) + if response.status_code == 200: + data = response.json()['count'] + break + except requests.exceptions.RequestException as e: + logging.error(f"Error fetching {self.target} data from API: {str(e)}") + else: + logging.error("Failed to fetch vulnerabilities data after 3 attempts") + + elif(self.target == "alerts"): + for _ in range(max_retries): + try: + response = requests.get(API_URL + ALERTS_ENDPOINT, verify=False, + auth=requests.auth.HTTPBasicAuth("admin", "admin")) + if response.status_code == 200: + data = response.json()['count'] + break + except requests.exceptions.RequestException as e: + logging.error(f"Error fetching {self.target} data from API: {str(e)}") + else: + logging.error("Failed to fetch alerts data after 3 attempts") + else: - logging.error("Failed to fetch daemons data after 3 attempts") + # Try to get the response token three times + for _ in range(max_retries): + try: + token_response = requests.get(API_URL + TOKEN_ENDPOINT, verify=False, + auth=requests.auth.HTTPBasicAuth("wazuh", "wazuh")) + if token_response.status_code == 200: + break + except requests.exceptions.RequestException as e: + logging.error(f"Error getting token from API: {str(e)}") + else: + logging.error("Retrying get API data, status code {}".format(token_response.status_code)) + + for _ in range(max_retries): + try: + response = requests.get(API_URL + 
DAEMONS_ENDPOINT, verify=False, + headers={'Authorization': 'Bearer ' + token_response.json()['data']['token']}) + if response.status_code == 200: + data = response.json()['data']['affected_items'][0] + break + except requests.exceptions.RequestException as e: + logging.error(f"Error fetching {self.daemon} datafrom API: {str(e)}") + else: + logging.error("Failed to fetch daemons data after 3 attempts") - data = daemons_response.json()['data']['affected_items'][0] self._write_csv(data, self.target, self.csv_file) def _write_csv(self, data, target, csv_file): - """Write the data collected from the .state into a CSV file. + """Write the data collected into a CSV file. Args: data (dict): dictionary containing the info from the .state file. @@ -199,6 +236,10 @@ def _write_csv(self, data, target, csv_file): csv_header = headers.remoted_header if self.use_state_file else headers.remoted_api_header elif target == "wazuhdb": csv_header = headers.wazuhdb_header + elif target == "vulnerabilities": + csv_header = headers.vulns_header + elif target == "alerts": + csv_header = headers.alerts_header else: csv_header = headers.agentd_header @@ -212,10 +253,11 @@ def _write_csv(self, data, target, csv_file): timestamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S') if self.use_state_file == False: - format = r"%Y-%m-%dT%H:%M:%S+%f:00" - datetime_timestamp = datetime.strptime(data['timestamp'], format) - datetime_uptime = datetime.strptime(data['uptime'], format) - interval = (datetime_timestamp - datetime_uptime).total_seconds() + if target not in ["vulnerabilities", "alerts"]: + format = r"%Y-%m-%dT%H:%M:%S+%f:00" + datetime_timestamp = datetime.strptime(data['timestamp'], format) + datetime_uptime = datetime.strptime(data['uptime'], format) + interval = (datetime_timestamp - datetime_uptime).total_seconds() if target == "analysis": metrics = data['metrics'] @@ -298,6 +340,16 @@ def _write_csv(self, data, target, csv_file): decoded['monitor'] / interval, # 62 
decoded['remote'] / interval, # 63 )) + elif target == "vulnerabilities": + logger.info("Writing vulnerabilities data info to {}.".format(csv_file)) + log.write(("{0}\n").format( + data + )) + elif target == "alerts": + logger.info("Writing alerts data info to {}.".format(csv_file)) + log.write(("{0}\n").format( + data + )) elif target == "remote": metrics = data['metrics'] received_messages = metrics['messages']['received_breakdown'] diff --git a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py index a7b15c0423..3cd8567be5 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py +++ b/deps/wazuh_testing/wazuh_testing/tools/performance/statistic_headers.py @@ -165,6 +165,10 @@ "Number of messages", "Number of events buffered"] +vulns_header = ["Total vulnerabilities"] + +alerts_header = ["Total alerts"] + wazuhdb_header = ["Timestamp", "API Timestamp", diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py index 9726739b06..93829ac8a4 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/system.py +++ b/deps/wazuh_testing/wazuh_testing/tools/system.py @@ -362,7 +362,7 @@ def make_api_call(self, host, port=55000, method='GET', endpoint='/', request_bo f'method={method} headers="{headers}" {request_body} ' f'validate_certs=no', check=check) - def run_command(self, host: str, cmd: str, check: bool = False): + def run_command(self, host: str, cmd: str, check: bool = False, system: str = 'linux'): """Run a command on the specified host and return its stdout. Args: @@ -370,13 +370,18 @@ def run_command(self, host: str, cmd: str, check: bool = False): cmd (str): Command to execute check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied. Default `False` + system (str): The operating system type. Defaults to 'linux'. 
+ Supported values: 'windows', 'macos', 'linux'. Returns: stdout (str): The output of the command execution. """ - return self.get_host(host).ansible("command", cmd, check=check)["stdout"] + if system == 'windows': + return self.get_host(host).ansible("win_command", cmd, check=check) + else: + return self.get_host(host).ansible("command", cmd, check=check)["stdout"] - def run_shell(self, host: str, cmd: str, check: bool = False): + def run_shell(self, host: str, cmd: str, check: bool = False, system: str = 'linux'): """Run a shell command on the specified host and return its stdout. The difference with run_command is that here, shell symbols like &, |, etc. are interpreted. @@ -386,11 +391,16 @@ def run_shell(self, host: str, cmd: str, check: bool = False): cmd (str): Shell command to execute check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied. Default `False` + system (str): The operating system type. Defaults to 'linux'. + Supported values: 'windows', 'macos', 'linux'. Returns: stdout (str): The output of the command execution. """ - return self.get_host(host).ansible('shell', cmd, check=check)['stdout'] + if system == 'windows': + return self.get_host(host).ansible("win_shell", cmd, check=check) + else: + return self.get_host(host).ansible('shell', cmd, check=check)['stdout'] def get_host_ip(self, host: str, interface: str): """Get the Ansible object for communicating with the specified host. 
@@ -495,7 +505,7 @@ def install_package(self, host, url, system='ubuntu'): result = True elif system == 'centos': result = self.get_host(host).ansible("yum", f"name={url} state=present " - 'sslverify=false disable_gpg_check=True', check=False) + 'sslverify=false disable_gpg_check=True', check=False) elif system == 'macos': package_name = url.split('/')[-1] result = self.get_host(host).ansible("command", f"curl -LO {url}", check=False) @@ -506,6 +516,40 @@ def install_package(self, host, url, system='ubuntu'): return result + def install_npm_package(self, host, url, system='ubuntu'): + """ + Installs a package on the specified host using npm. + + Args: + host (str): The target host on which to install the package. + url (str): The URL or name of the package to be installed. + system (str, optional): The operating system type. Defaults to 'ubuntu'. + Supported values: 'windows', 'ubuntu', 'centos', 'macos'. + + Returns: + Dict: Testinfra Ansible Response of the operation + + Example: + host_manager.install_package('my_host', 'package_name', 'system_name') + """ + + # Define the npm install command + cmd = f"npm install -g {url}" + + if system == 'macos': + cmd = f"PATH=/usr/local/bin:$PATH {cmd}" + shell_type = "shell" + elif system == 'windows': + shell_type = "win_shell" + else: + shell_type = "shell" + + # Execute the command and log the result + result = self.get_host(host).ansible(shell_type, cmd, check=False) + logging.info(f"npm package installed result {result}") + + return result + def get_master_ip(self): """ Retrieves the IP address of the master node from the inventory. @@ -594,6 +638,49 @@ def remove_package(self, host, system, package_uninstall_name=None, custom_unins return remove_operation_result + def remove_npm_package(self, host, system, package_uninstall_name=None, custom_uninstall_playbook=None): + """ + Removes a package from the specified host using npm. + + Args: + host (str): The target host from which to remove the package. 
+ package_name (str): The name of the package to be removed. + system (str): The operating system type. + Supported values: 'windows', 'ubuntu', 'centos', 'macos'. + + Returns: + Dict: Testinfra Ansible Response of the operation + + Example: + host_manager.remove_npm_package('my_host', 'system_name', 'package_name') + """ + logging.info(f"Removing package {package_uninstall_name} from host {host}") + logging.info(f"System: {system}") + + remove_operation_result = False + + os_name = self.get_host_variables(host)['os_name'] + + if custom_uninstall_playbook: + remove_operation_result = self.run_playbook(host, custom_uninstall_playbook) + else: + # Define the npm uninstall command + cmd = f"npm uninstall -g {package_uninstall_name}" + + if system == 'macos': + cmd = f"PATH=/usr/local/bin:$PATH {cmd}" + shell_type = "shell" + elif system == 'windows': + shell_type = "win_shell" + else: + shell_type = "shell" + + # Execute the command and log the result + remove_operation_result = self.get_host(host).ansible(shell_type, cmd, check=False) + logging.info(f"npm package removed result {remove_operation_result}") + + return remove_operation_result + def run_playbook(self, host, playbook_name, params=None): """ Executes an Ansible playbook on the specified host. 
@@ -670,7 +757,7 @@ def handle_wazuh_services(self, host, operation): if os == 'linux': result = binary_path = f"/var/ossec/bin/wazuh-control" elif os == 'macos': - result= binary_path = f"/Library/Ossec/bin/wazuh-control" + result = binary_path = f"/Library/Ossec/bin/wazuh-control" result = self.get_host(host).ansible('shell', f"{binary_path} {operation}", check=False) diff --git a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml index 90605a7ffb..91dce5f6bc 100644 --- a/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml +++ b/tests/end_to_end/test_vulnerability_detector/cases/test_vulnerability.yaml @@ -2,20 +2,20 @@ id: install_package description: | Installation of a vulnerable package - macos: - Used Package: Node 17.0.1 - PKG Format - CVES: - amd64: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - arm64v8: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + macos: + Used Package: http-proxy 0.5.9 - npm Format + CVES: + amd64: ["CVE-2017-16014"], + arm64v8: ["CVE-2017-16014"], windows: - Used Package: Node 17.0.1 - Exe Format + Used Package: Node 17.0.1 - .msi Format CVE: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], ubuntu: - Used Package Mysql 5.5.20 - .deb Format - CVE: ["CVE-2023-22028", "CVE-2023-22026", "CVE-2023-22015", "CVE-2023-22007", "CVE-2023-21980", "CVE-2023-21977", "CVE-2022-21444", "CVE-2022-21417", "CVE-2021-22570", "CVE-2021-2356", "CVE-2020-15358", "CVE-2020-14852", "CVE-2020-14846", "CVE-2020-14845", "CVE-2020-14839", "CVE-2020-14837", "CVE-2020-14830"] + Used Packages: Grafana 8.5.5 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", 
"CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"], centos: - Used Package Openjdk 1.6.0 - .rpm Format - CVE: ["CVE-2023-21967", "CVE-2023-21954", "CVE-2023-21939", "CVE-2023-21938", "CVE-2023-21937", "CVE-2023-21930", "CVE-2014-2405", "CVE-2014-1876", "CVE-2014-0462", "CVE-2012-5373", "CVE-2012-2739"] + Used Packages: Grafana 8.5.5 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"], preconditions: null body: tasks: @@ -26,35 +26,36 @@ state_index: true package: centos: - amd64: openjdk-1.6.0 + amd64: grafana-8.5.5-1 + arm64v8: grafana-8.5.5-1 ubuntu: - amd64: mysql-5.5.20 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 windows: amd64: node-v17.0.1 macos: - amd64: node-v17.0.1 - arm64v8: node-v17.0.1 + amd64: http-proxy-0.5.9 + arm64v8: http-proxy-0.5.9 - case: Remove vulnerable package id: remove_package description: | Removal of a vulnerable package - macos: - Used Package: Node 17.0.1 - PKG Format - CVES Expected to mitigate: - ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], + macos: + Used Package: http-proxy 0.5.9 - npm Format + CVES Expected to mitigate: + ["CVE-2017-16014"], windows: - Used Package: Node 17.0.1 - Exe Format - - CVES Expected to mitigate: + Used Package: Node 17.0.1 - .msi Format + CVES Expected to mitigate: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], ubuntu: - Used Package Mysql 5.5.20 - .deb Format + Used Packages: Grafana 8.5.5 - .deb Format CVES Expected to mitigate: - ["CVE-2023-22028", "CVE-2023-22026", "CVE-2023-22015", 
"CVE-2023-22007", "CVE-2023-21980", "CVE-2023-21977", "CVE-2022-21444", "CVE-2022-21417", "CVE-2021-22570", "CVE-2021-2356", "CVE-2020-15358", "CVE-2020-14852", "CVE-2020-14846", "CVE-2020-14845", "CVE-2020-14839", "CVE-2020-14837", "CVE-2020-14830"] + ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"], centos: - Used Package Openjdk 1.6.0 - .rpm Format + Used Packages: Grafana 8.5.5 - .rpm Format CVE Expected to mitigate: - ["CVE-2023-21967", "CVE-2023-21954", "CVE-2023-21939", "CVE-2023-21938", "CVE-2023-21937", "CVE-2023-21930", "CVE-2014-2405", "CVE-2014-1876", "CVE-2014-0462", "CVE-2012-5373", "CVE-2012-2739"] + ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"], preconditions: null body: tasks: @@ -65,32 +66,34 @@ state_index: true package: centos: - amd64: openjdk-1.6.0 + amd64: grafana-8.5.5-1 + arm64v8: grafana-8.5.5-1 ubuntu: - amd64: mysql-5.5.20 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 windows: amd64: node-v17.0.1 macos: - amd64: node-v17.0.1 - arm64v8: node-v17.0.1 + amd64: http-proxy-0.5.9 + arm64v8: http-proxy-0.5.9 - case: 'Upgrade: Maintain Vulnerability' id: upgrade_package_maintain_vulnerability description: | Upgrade of a vulnerable package which maintain vulnerability - macos: - Used Package: Node 17.1.0 - PKG Format - CVES: - amd64: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], - arm64v8: ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", 
"CVE-2021-4044"], + macos: + Used Package: http-proxy 0.5.10 - npm Format + CVES: + amd64: ["CVE-2017-16014"], + arm64v8: ["CVE-2017-16014"], windows: - Used Package: Node 17.1.0 - Exe Format + Used Package: Node 17.1.0 - .msi Format "CVE": ["CVE-2022-21824", "CVE-2022-0778", "CVE-2021-44533", "CVE-2021-44532", "CVE-2021-44531", "CVE-2021-4044"], ubuntu: - Used Package Mysql 5.5.21 - .deb Format - CVE: ["CVE-2023-22028", "CVE-2023-22026", "CVE-2023-22015", "CVE-2023-22007", "CVE-2023-21980", "CVE-2023-21977", "CVE-2022-21444", "CVE-2022-21417", "CVE-2021-22570", "CVE-2021-2356", "CVE-2020-15358", "CVE-2020-14852", "CVE-2020-14846", "CVE-2020-14845", "CVE-2020-14839", "CVE-2020-14837", "CVE-2020-14830"] + Used Packages: Grafana 8.5.6 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"], centos: - Used Package Openjdk 1.7.0 - .rpm Format - CVE: ["CVE-2023-21967", "CVE-2023-21954", "CVE-2023-21939", "CVE-2023-21938", "CVE-2023-21937", "CVE-2023-21930", "CVE-2014-8873", "CVE-2014-2483", "CVE-2014-1876", "CVE-2013-2461", "CVE-2012-5373", "CVE-2012-2739"] + Used Packages: Grafana 8.5.6 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1410", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-31107", "CVE-2022-31097", "CVE-2022-23552", "CVE-2022-23498"], preconditions: tasks: - operation: install_package @@ -100,14 +103,16 @@ state_index: true package: centos: - amd64: openjdk-1.6.0 + amd64: grafana-8.5.5-1 + arm64v8: grafana-8.5.5-1 ubuntu: - amd64: mysql-5.5.20 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 windows: amd64: node-v17.0.1 macos: - amd64: 
node-v17.0.1 - arm64v8: node-v17.0.1 + amd64: http-proxy-0.5.9 + arm64v8: http-proxy-0.5.9 body: tasks: - operation: update_package @@ -118,41 +123,45 @@ package: from: centos: - amd64: openjdk-1.6.0 + amd64: grafana-8.5.5-1 + arm64v8: grafana-8.5.5-1 ubuntu: - amd64: mysql-5.5.20 + amd64: grafana-8.5.5 + arm64v8: grafana-8.5.5 windows: amd64: node-v17.0.1 macos: - amd64: node-v17.0.1 - arm64v8: node-v17.0.1 + amd64: http-proxy-0.5.9 + arm64v8: http-proxy-0.5.9 to: centos: - amd64: openjdk-1.7.0 + amd64: grafana-8.5.6-1 + arm64v8: grafana-8.5.6-1 ubuntu: - amd64: mysql-5.5.21 + amd64: grafana-8.5.6 + arm64v8: grafana-8.5.6 windows: amd64: node-v17.1.0 macos: - amd64: node-v17.1.0 - arm64v8: node-v17.1.0 + amd64: http-proxy-0.5.10 + arm64v8: http-proxy-0.5.10 - case: 'Upgrade: New vulnerability ' id: upgrade_package_maintain_add_vulnerability description: | Upgrade of a vulnerable package which include a new vulnerability - macos: - Used Package: Node 18.11.0 - PKG Format - CVES: ["CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-32222"], + macos: + Used Package: systeminformation 5.0.0 - npm Format + CVE: ["CVE-2021-21388", "CVE-2021-21315", "CVE-2023-42810"], windows: - Used Package: Node 18.0.0 - Exe Format - "CVE": ["CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30589", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-35256", "CVE-2022-35255", "CVE-2022-32223", "CVE-2022-32222", "CVE-2022-32215", "CVE-2022-32214", "CVE-2022-32213", "CVE-2022-32212", "CVE-2022-3786", "CVE-2022-3602"], + Used Package: Node 18.0.0 - .msi Format + "CVE": ["CVE-2023-44487", CVE-2023-23936", CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", 
"CVE-2023-30589", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-35256", "CVE-2022-35255", "CVE-2022-32223", "CVE-2022-32222", "CVE-2022-32215", "CVE-2022-32214", "CVE-2022-32213", "CVE-2022-32212", "CVE-2022-3786", "CVE-2022-3602"], ubuntu: - Used Package Mysql 5.5.19 - .deb Format - CVE: ["CVE-2023-22026", "CVE-2023-22015", "CVE-2023-22007", "CVE-2023-21980", "CVE-2023-21977", "CVE-2022-21444", "CVE-2022-21417", "CVE-2021-22570", "CVE-2023-22007", "CVE-2023-22028", "CVE-2021-2356", "CVE-2022-21417", "CVE-2022-21444", "CVE-2023-21980", "CVE-2023-21977"] + Used Packages: Grafana 9.1.1 - .deb Format + CVE: ["CVE-2023-2183", "CVE-2023-1387", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-23552", "CVE-2022-23498"], centos: - Used Package Openjdk 1.7.0 - .rpm Format - CVE: ["CVE-2023-21967", "CVE-2023-21954", "CVE-2023-21939", "CVE-2023-21938", "CVE-2023-21937", "CVE-2023-21930", "CVE-2014-8873", "CVE-2014-2483", "CVE-2014-1876", "CVE-2013-2461", "CVE-2012-5373", "CVE-2012-2739"] + Used Packages: Grafana 9.1.1 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1387", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-23552", "CVE-2022-23498"], preconditions: null body: tasks: @@ -164,43 +173,47 @@ package: from: centos: - amd64: openjdk-1.6.0 + amd64: grafana-8.5.6-1 + arm64v8: grafana-8.5.6-1 ubuntu: - amd64: mysql-5.5.18 + amd64: grafana-8.5.6 + arm64v8: grafana-8.5.6 windows: amd64: node-v17.1.0 macos: - amd64: node-v17.1.0 - arm64v8: node-v17.1.0 + amd64: systeminformation-4.34.23 + arm64v8: systeminformation-4.34.23 to: centos: - amd64: openjdk-1.7.0 + amd64: grafana-9.1.1-1 + arm64v8: grafana-9.1.1-1 ubuntu: - amd64: mysql-5.5.19 + 
amd64: grafana-9.1.1 + arm64v8: grafana-9.1.1 windows: amd64: node-v18.0.0 macos: - amd64: node-v18.11.0 - arm64v8: node-v18.11.0 - + amd64: systeminformation-5.0.0 + arm64v8: systeminformation-5.0.0 + - case: 'Upgrade: Maintain and new vulnerability ' id: upgrade_package_maintain_add_vulnerability description: > Upgrade of a vulnerable package which maintain vulnerabilities and include new ones - macos: - Used Package: Node 18.12.0 - PKG Format - "CVE": ["CVE-2023-44487", "CVE-2023-38552", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-23936", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-3786", "CVE-2022-3602"], + macos: + Used Package: systeminformation 5.0.0 - npm Format + "CVE": ["CVE-2021-21388", "CVE-2021-21315", "CVE-2023-42810"], windows: - Used Package: Node 18.1.0 - Exe Format - "CVE": ["CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-35256", "CVE-2022-35255", "CVE-2022-32222", "CVE-2022-32215", "CVE-2022-32214", "CVE-2022-32213", "CVE-2022-32212", "CVE-2022-3786", "CVE-2022-3602"], + Used Package: Node 18.1.0 - .msi Format + "CVE": ["CVE-2023-44487, CVE-2023-23936, CVE-2023-30589, CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32006", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30585", "CVE-2023-30581", "CVE-2023-23920", "CVE-2023-23919", "CVE-2023-23918", "CVE-2022-43548", "CVE-2022-35256", "CVE-2022-35255", "CVE-2022-32222", "CVE-2022-32215", "CVE-2022-32214", "CVE-2022-32213", "CVE-2022-32212", "CVE-2022-3786", "CVE-2022-3602"], ubuntu: - Used Package Mysql 5.5.19 - .deb Format - CVE: ["CVE-2023-22026", "CVE-2023-22015", "CVE-2023-22007", "CVE-2023-21980", "CVE-2023-21977", "CVE-2022-21444", "CVE-2022-21417", "CVE-2021-22570", "CVE-2023-22007", "CVE-2023-22028", "CVE-2021-2356", 
"CVE-2022-21417", "CVE-2022-21444", "CVE-2023-21980", "CVE-2023-21977"] + Used Packages: Grafana 9.2.0 - .deb Format + CVE: ["CVE-2023-3128", "CVE-2023-22462", "CVE-2023-2183", "CVE-2023-1410", "CVE-2023-1387", "CVE-2023-0594", "CVE-2023-0507", "CVE-2022-39328", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-23552", "CVE-2022-23498"], centos: - Used Package Openjdk 1.8.0 - .rpm Format - CVE: ["CVE-2023-21967", "CVE-2023-21954", "CVE-2023-21939", "CVE-2023-21938", "CVE-2023-21937", "CVE-2023-21930", "CVE-2021-20264", "CVE-2014-1876", "CVE-2012-2739"] + Used Packages: Grafana 9.2.0 - .rpm Format + CVE: ["CVE-2023-2183", "CVE-2023-1387", "CVE-2022-39324", "CVE-2022-39307", "CVE-2022-39306", "CVE-2022-39229", "CVE-2022-39201", "CVE-2022-36062", "CVE-2022-35957", "CVE-2022-31130", "CVE-2022-31123", "CVE-2022-23552", "CVE-2022-23498"], preconditions: null body: tasks: @@ -212,41 +225,45 @@ package: from: centos: - amd64: openjdk-1.7.0 + amd64: grafana-9.1.1-1 + arm64v8: grafana-9.1.1-1 ubuntu: - amd64: mysql-5.5.18 + amd64: grafana-9.1.1 + arm64v8: grafana-9.1.1 windows: amd64: node-v18.0.0 macos: - amd64: node-v18.11.0 - arm64v8: node-v18.11.0 + amd64: systeminformation-4.34.23 + arm64v8: systeminformation-4.34.23 to: centos: - amd64: openjdk-1.8.0 + amd64: grafana-9.2.0-1 + arm64v8: grafana-9.2.0-1 ubuntu: - amd64: mysql-5.5.19 + amd64: grafana-9.2.0 + arm64v8: grafana-9.2.0 windows: amd64: node-v18.1.0 macos: - amd64: node-v18.12.0 - arm64v8: node-v18.12.0 - + amd64: systeminformation-5.0.0 + arm64v8: systeminformation-5.0.0 + - case: 'Upgrade: Cease vulnerability' id: upgrade_package_remove_vulnerability description: | Upgrade of a vulnerable which cease to be vulnerable - macos: - Used Package: Node 19.5.0 - PKG Format - "CVE": [], + macos: + Used Package: http-proxy 0.7.0 - npm Format + "CVE": [], windows: - Used Package: Node 19.5.0 - Exe Format + Used Package: Node 18.20.0 - .msi Format "CVE": [], ubuntu: - Used Package Grafana 9.4.17 - .deb 
Format - CVE: [] + Used Packages: Grafana 9.4.17 - .deb Format + CVE: [], centos: - Used Package Grafana 9.4.17 - .rpm Format - CVE: [] + Used Packages: Grafana 9.4.17 - .rpm Format + CVE: [], preconditions: null body: tasks: @@ -258,45 +275,45 @@ package: from: centos: - amd64: grafana-9.2.0 - arm64v8: grafana-9.2.0 + amd64: grafana-9.2.0-1 + arm64v8: grafana-9.2.0-1 ubuntu: amd64: grafana-9.2.0 arm64v8: grafana-9.2.0 windows: amd64: node-v18.1.0 macos: - amd64: node-v18.12.0 - arm64v8: node-v18.12.0 + amd64: http-proxy-0.5.10 + arm64v8: http-proxy-0.5.10 to: centos: - amd64: grafana-9.4.17 - arm64v8: grafana-9.4.17 + amd64: grafana-9.4.17-1 + arm64v8: grafana-9.4.17-1 ubuntu: arm64v8: grafana-9.4.17 amd64: grafana-9.4.17 windows: - amd64: node-v19.5.0 + amd64: node-v18.20.0 macos: - amd64: node-v19.5.0 - arm64v8: node-v19.5.0 - + amd64: http-proxy-0.7.0 + arm64v8: http-proxy-0.7.0 + - case: 'Upgrade: Non vulnerable to non vulnerable' id: upgrade_package_nonvulnerable_to_nonvulnerable description: | Upgrade of a non vulnerable package to non vulnerable - macos: - Used Package: Node 19.5.0 - PKG Format - "CVE": [], + macos: + Used Package: http-proxy 0.7.2 - npm Format + "CVE": [], windows: - Used Package: Node 19.5.0 - Exe Format + Used Package: Node 18.20.2 - .msi Format "CVE": [], ubuntu: - Used Package Grafana 9.5.13 - .deb Format - CVE: [] + Used Packages: Grafana 9.5.13 - .deb Format + CVE: [], centos: - Used Package Grafana 9.5.13 - .rpm Format - CVE: [] + Used Packages: Grafana 9.5.13 - .rpm Format + CVE: [], preconditions: tasks: - operation: install_package @@ -306,7 +323,10 @@ state_index: true package: windows: - amd64: node-v19.5.0 + amd64: node-v18.20.0 + macos: + amd64: http-proxy-0.7.0 + arm64v8: http-proxy-0.7.0 body: tasks: - operation: update_package @@ -317,44 +337,44 @@ package: from: centos: - amd64: grafana-9.4.17 - arm64v8: grafana-9.4.17 + amd64: grafana-9.4.17-1 + arm64v8: grafana-9.4.17-1 ubuntu: arm64v8: grafana-9.4.17 amd64: 
grafana-9.4.17 windows: - amd64: node-v19.5.0 + amd64: node-v18.20.0 macos: - amd64: node-v19.5.0 - arm64v8: node-v19.5.0 + amd64: http-proxy-0.7.0 + arm64v8: http-proxy-0.7.0 to: centos: - amd64: grafana-9.5.13 - arm64v8: grafana-9.5.13 + amd64: grafana-9.5.13-1 + arm64v8: grafana-9.5.13-1 ubuntu: amd64: grafana-9.5.13 arm64v8: grafana-9.5.13 windows: - amd64: node-v19.6.0 + amd64: node-v18.20.2 macos: - amd64: node-v19.6.0 - arm64v8: node-v19.6.0 - + amd64: http-proxy-0.7.2 + arm64v8: http-proxy-0.7.2 + - case: 'Upgrade: Non vulnerable to vulnerable package' id: upgrade_package_nonvulnerable_to_vulnerable description: | Upgrade to non vulnerable package to vulnerable - macos: - Used Package: Node 20.0.0 - PKG Format - "CVE": ["CVE-2023-44487", "CVE-2023-39332", "CVE-2023-39331", "CVE-2023-38552", "CVE-2023-32559", "CVE-2023-32558", "CVE-2023-32006", "CVE-2023-32005", "CVE-2023-32004", "CVE-2023-32003", "CVE-2023-32002", "CVE-2023-30590", "CVE-2023-30588", "CVE-2023-30586", "CVE-2023-30585", "CVE-2023-30581"], + macos: + Used Package: luxon 3.0.0 - npm Format + "CVE": ["CVE-2022-31129"], windows: - Used Package: Node 20.5.1 - Exe Format + Used Package: Node 20.5.1 - .msi Format "CVE": ["CVE-2023-44487", "CVE-2023-39332", "CVE-2023-39331", "CVE-2023-38552"], ubuntu: - Used Package Grafana 9.5.13 - .deb Format + Used Packages: Grafana 10.0.0 - .deb Format CVE: ["CVE-2023-4822", "CVE-2023-4399"], centos: - Used Package Grafana 9.5.13 - .rpm Format + Used Packages: Grafana 10.0.0 - .rpm Format CVE: ["CVE-2023-4822", "CVE-2023-4399"], preconditions: null body: @@ -367,43 +387,45 @@ package: from: centos: - amd64: firefox-91.13.0 - arm64v8: grafana-8.5.5 + amd64: grafana-9.5.13-1 + arm64v8: grafana-9.5.13-1 ubuntu: - amd64: grafana-8.5.5 + amd64: grafana-9.5.13 + arm64v8: grafana-9.5.13 windows: - amd64: node-v19.6.0 + amd64: node-v18.20.2 macos: - amd64: node-v19.6.0 - arm64v8: node-v19.6.0 + amd64: luxon-2.5.2 + arm64v8: luxon-2.5.2 to: centos: - amd64: 
firefox-91.13.0 - arm64v8: grafana-8.5.5 + amd64: grafana-10.0.0-1 + arm64v8: grafana-10.0.0-1 ubuntu: - amd64: grafana-8.5.5 + amd64: grafana-10.0.0 + arm64v8: grafana-10.0.0 windows: amd64: node-v20.5.1 macos: - amd64: node-v20.0.0 - arm64v8: node-v20.0.0 - + amd64: luxon-3.0.0 + arm64v8: luxon-3.0.0 + - case: Installation of a non vulnerable package id: install_package_non_vulnerable description: | Installation of a non vulnerable package - macos: - Used Package: Node 19.5.0 - PKG Format - "CVE": [], + macos: + Used Package: http-proxy 0.7.0 - npm Format + "CVE": [], windows: - Used Package: Node 19.5.0 - Exe Format + Used Package: Node 18.20.0 - .msi Format "CVE": [], ubuntu: - Used Package Grafana 9.5.13 - .deb Format - CVE: [] + Used Packages: Grafana 9.5.13 - .deb Format + CVE: [], centos: - Used Package Grafana 9.5.13 - .rpm Format - CVE: [] + Used Packages: Grafana 9.5.13 - .rpm Format + CVE: [], preconditions: null body: tasks: @@ -414,32 +436,32 @@ state_index: true package: centos: - amd64: grafana-9.5.13 - arm64v8: grafana-9.5.13 + amd64: grafana-9.5.13-1 + arm64v8: grafana-9.5.13-1 ubuntu: amd64: grafana-9.5.13 arm64v8: grafana-9.5.13 windows: - amd64: node-v19.5.0 + amd64: node-v18.20.0 macos: - amd64: node-v19.6.0 - arm64v8: node-v19.6.0 + amd64: http-proxy-0.7.0 + arm64v8: http-proxy-0.7.0 - case: 'Remove: Non vulnerable package' id: remove_non_vulnerable_packge description: | Removal of a non vulnerable package - macos: - Used Package: Node 19.5.0 - PKG Format - "CVE": [], + macos: + Used Package: http-proxy 0.7.0 - npm Format + "CVE": [], windows: - Used Package: Node 19.5.0 - Exe Format + Used Package: Node 18.20.0 - .msi Format "CVE": [], ubuntu: - Used Package Grafana 9.5.13 - .deb Format + Used Packages: Grafana 9.5.13 - .deb Format CVE: [] centos: - Used Package Grafana 9.5.13 - .rpm Format - CVE: [] + Used Packages: Grafana 9.5.13 - .rpm Format + CVE: [], body: tasks: - operation: remove_package @@ -449,15 +471,13 @@ state_index: true 
package: centos: - amd64: grafana-9.5.13 - arm64v8: grafana-9.5.13 + amd64: grafana-9.5.13-1 + arm64v8: grafana-9.5.13-1 ubuntu: amd64: grafana-9.5.13 arm64v8: grafana-9.5.13 windows: - amd64: node-v19.5.0 + amd64: node-v18.20.0 macos: - amd64: node-v19.6.0 - arm64v8: node-v19.6.0 - - + amd64: http-proxy-0.7.0 + arm64v8: http-proxy-0.7.0 diff --git a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml index 06b0303a9f..646063a0df 100644 --- a/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml +++ b/tests/end_to_end/test_vulnerability_detector/configurations/manager.yaml @@ -21,10 +21,10 @@ elements: - ca: value: FILEBEAT_ROOT_CA - certificate: - value: FILEBEAT_CERTIFICATE - key: - value: FILEBEAT_KEY + - certificate: + value: FILEBEAT_CERTIFICATE + - key: + value: FILEBEAT_KEY - section: sca elements: - enabled: diff --git a/tests/end_to_end/test_vulnerability_detector/conftest.py b/tests/end_to_end/test_vulnerability_detector/conftest.py index 646ae67040..b5aa64b32a 100644 --- a/tests/end_to_end/test_vulnerability_detector/conftest.py +++ b/tests/end_to_end/test_vulnerability_detector/conftest.py @@ -42,7 +42,7 @@ def test_example(host_manager): from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations -from wazuh_testing.end_to_end.logs import get_hosts_logs +from wazuh_testing.end_to_end.logs import get_hosts_logs, get_hosts_alerts STYLE_PATH = os.path.join(os.path.dirname(__file__), '../../../deps/wazuh_testing/wazuh_testing/reporting/style.css') @@ -61,6 +61,7 @@ def collect_e2e_environment_data(test_name, host_manager) -> None: """ logging.info("Collecting environment data") environment_logs = get_hosts_logs(host_manager) + environment_alerts = get_hosts_alerts(host_manager) current_dir = os.path.dirname(__file__) vulnerability_detector_logs_dir = 
os.path.join(current_dir, "logs") @@ -68,10 +69,17 @@ def collect_e2e_environment_data(test_name, host_manager) -> None: for host in environment_logs.keys(): logging.info(f"Collecting logs for {host}") - host_logs_name_evidence = host + "_ossec.log" - evidence_file = os.path.join(tests_evidences_directory, host_logs_name_evidence) - with open(evidence_file, 'w') as evidence_file: - evidence_file.write(environment_logs[host]) + host_logs_name_evidence = host + "_ossec.log" + evidence_log_file = os.path.join(tests_evidences_directory, host_logs_name_evidence) + with open(evidence_log_file, 'w') as evidence_log_file: + evidence_log_file.write(environment_logs[host]) + + for host in environment_alerts.keys(): + logging.info(f"Collecting alerts for {host}") + host_alerts_name_evidence = host + "_alert.json" + evidence_alert_file = os.path.join(tests_evidences_directory, host_alerts_name_evidence) + with open(evidence_alert_file, 'w') as evidence_alert_file: + evidence_alert_file.write(environment_alerts[host]) def collect_evidences(test_name, evidences) -> None: @@ -399,3 +407,21 @@ def pytest_html_results_summary(prefix, summary, postfix): def pytest_configure(config): if not config.option.css: config.option.css = [STYLE_PATH] + + +@pytest.fixture(scope="session", autouse=True) +def modify_timezone(request): + inventory_path = request.config.getoption('--inventory-path') + host_manager = HostManager(inventory_path) + hosts = host_manager.get_group_hosts('agent') + command = '' + + for host in hosts: + if host_manager.get_host_variables(host)['os_name'] == 'macos': + command = 'systemsetup -settimezone GMT' + elif host_manager.get_host_variables(host)['os_name'] == 'windows': + command = 'Set-TimeZone -Id "UTC"' + else: + continue + + host_manager.run_shell(host, command, system=host_manager.get_host_variables(host)['os_name']) diff --git a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py 
b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py index 3756bc15a2..ef97ef88e5 100644 --- a/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py +++ b/tests/end_to_end/test_vulnerability_detector/test_vulnerability_detector.py @@ -15,13 +15,13 @@ The verification of vulnerabilities is conducted through Vulnerabilities Index and API endpoint Additionally, the tests ensure the consistency of these values. -Tests: - - TestInitialScans: Validates the initiation of Syscollector scans across all agents in the environment. +Tests: + - TestInitialScans: Validates the initiation of Syscollector scans across all agents in the environment. - test_syscollector_first_scan: Validates the initiation of the first Syscollector scans across all agents in the environment. - test_syscollector_first_scan_index: Validates that the Vulnerability Detector detects vulnerabilities within the environment in the first scan in the index. - test_syscollector_second_scan: Validates the initiation of the second Syscollector scans across all agents in the environment. - tests_syscollector_first_second_scan_consistency_index: Ensure the consistency of the agent's vulnerabilities between the first and second scans in index. - - TestScanSyscollectorCases: Validates the Vulnerability Detector's ability to detect new vulnerabilities in the environment. + - TestScanSyscollectorCases: Validates the Vulnerability Detector's ability to detect new vulnerabilities in the environment for each of the defined cases. 
Issue: https://github.com/wazuh/wazuh-qa/issues/4369 @@ -53,7 +53,8 @@ from wazuh_testing.end_to_end.waiters import wait_until_vd_is_updated from wazuh_testing.end_to_end.monitoring import generate_monitoring_logs, monitoring_events_multihost from wazuh_testing.end_to_end.regex import get_event_regex -from wazuh_testing.end_to_end.indexer_api import get_indexer_values +from wazuh_testing.end_to_end.indexer_api import get_indexer_values, delete_index, \ + create_vulnerability_states_indexer_filter, create_alerts_filter from wazuh_testing.tools.configuration import load_configuration_template from wazuh_testing.tools.system import HostManager from wazuh_testing.end_to_end.remote_operations_handler import launch_parallel_operations @@ -109,6 +110,39 @@ def load_vulnerability_detector_configurations(host_manager): return configurations +@pytest.fixture(scope='module', autouse=True) +def install_npm(host_manager: HostManager): + """Check and install npm if not already installed""" + + node_version = "v21.7.1" + node_package_url = f"https://nodejs.org/dist/{node_version}/node-{node_version}.pkg" + + target_os_groups = ['macos'] + + for group in target_os_groups: + for host in host_manager.get_group_hosts(group): + # Check if Node and npm is installed + logger.info(f"Checking and installing npm on {host}") + node_check_command = "PATH=/usr/local/bin:$PATH && command -v node" + node_check_result = host_manager.get_host(host).ansible( + "shell", + node_check_command, + become=True, + become_user='vagrant', + check=False + ) + logger.info(f"Node check result on {host}: {node_check_result}") + # Install node if it is not already installed. + if node_check_result['rc'] != 0: + logger.info(f"Installing Node.js and npm using package: {node_package_url}") + + # Use the install_package method to handle the installation. + install_result = host_manager.install_package(host, node_package_url, system='macos') + + # Logging the result of installation attempt. 
+ logger.info(f"Node.js and npm installation result on {host}: {install_result}") + else: + logger.info("Node.js and npm are already installed.") @pytest.fixture(scope='module') def setup_vulnerability_tests(host_manager: HostManager) -> Generator: @@ -125,6 +159,8 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: logger.error("Configuring environment") configure_environment(host_manager, load_vulnerability_detector_configurations(host_manager)) + logger.error("Save the Wazuh indexer username and password into the Wazuh manager keystore") + save_indexer_credentials_into_keystore(host_manager) # Truncate alerts and logs of managers and agents logger.error("Truncate managers and agents logs") @@ -133,35 +169,19 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: # Restart managers and stop agents logger.error("Stopping agents") host_manager.control_environment('stop', ['agent']) - logger.error("Restarting managers") - host_manager.control_environment('restart', ['manager']) - - logger.error("Save the Wazuh indexer username and password into the Wazuh manager keystore") - save_indexer_credentials_into_keystore(host_manager) - logger.error("Restarting managers") - host_manager.control_environment('restart', ['manager']) - - utc_now_timestamp = datetime.datetime.utcnow() + utc_now_timestamp = datetime.datetime.now(datetime.timezone.utc) # Format the date and time as per the given format test_timestamp = utc_now_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ") + logger.error("Restarting managers") + host_manager.control_environment('restart', ['manager']) + # Wait until VD is updated logger.error("Wait until Vulnerability Detector has update all the feeds") wait_until_vd_is_updated(host_manager) - # Truncate alerts and logs of managers and agents - logger.error("Truncate managers and agents logs") - truncate_remote_host_group_files(host_manager, 'all', 'logs') - - # Re-Register agents: https://github.com/wazuh/wazuh/issues/21185 - 
logger.error("Removing agents") - host_manager.remove_agents() - - # Wait until agents are registered again - time.sleep(15) - # Start agents host_manager.control_environment('start', ['agent']) @@ -171,6 +191,10 @@ def setup_vulnerability_tests(host_manager: HostManager) -> Generator: logger.error("Truncate managers and agents logs") truncate_remote_host_group_files(host_manager, 'all', 'logs') + # Delete vulnerability index + logger.error("Delete vulnerability index") + delete_index(host_manager, index='wazuh-states-vulnerabilities') + logger.error("Restoring original configuration") restore_configuration(host_manager, hosts_configuration_backup) @@ -253,8 +277,7 @@ def test_syscollector_first_scan(self, request, host_manager, setup_vulnerabilit else: logger.critical("All agents has been scanned") - - def test_syscollector_first_scan_index(self, request, host_manager, setup_vulnerability_tests, get_results): + def test_vulnerability_first_scan_index(self, request, host_manager, setup_vulnerability_tests, get_results): """ description: Validates that the Vulnerability Detector detects vulnerabilities within the environment in the first scan in the index. 
@@ -310,7 +333,10 @@ def test_syscollector_first_scan_index(self, request, host_manager, setup_vulner time.sleep(TIMEOUT_PER_AGENT_VULNERABILITY_SCAN * len(agents_to_check)) for agent in agents_to_check: - agent_all_vulnerabilities = get_indexer_values(host_manager, greater_than_timestamp=setup_vulnerability_tests, agent=agent, index='wazuh-states-vulnerabilities',)['hits']['hits'] + filter = create_vulnerability_states_indexer_filter(agent, setup_vulnerability_tests) + agent_all_vulnerabilities = get_indexer_values(host_manager, + filter=filter, + index='wazuh-states-vulnerabilities')['hits']['hits'] vuln_by_agent_index[agent] = agent_all_vulnerabilities @@ -338,7 +364,6 @@ def test_syscollector_first_scan_index(self, request, host_manager, setup_vulner else: logger.critical("All agents has been scanned and updated states index") - def test_syscollector_second_scan(self, request, host_manager, setup_vulnerability_tests, get_results): """ description: Validates the initiation of the second Syscollector scans across all agents in the environment. 
@@ -413,7 +438,6 @@ def test_syscollector_second_scan(self, request, host_manager, setup_vulnerabili else: logger.critical("Syscollector scan started in all agents") - def tests_syscollector_first_second_scan_consistency_index(self, request, host_manager, setup_vulnerability_tests, get_results): """ @@ -463,31 +487,39 @@ def tests_syscollector_first_second_scan_consistency_index(self, request, host_m vuln_by_agent_index_second_scan = {} for agent in host_manager.get_group_hosts('agent'): + filter = create_vulnerability_states_indexer_filter(target_agent=agent, + greater_than_timestamp=setup_vulnerability_tests) agent_all_vulnerabilities = get_indexer_values(host_manager, - greater_than_timestamp=setup_vulnerability_tests, - index='wazuh-states-vulnerabilities', - agent=agent)['hits']['hits'] + filter=filter, + index='wazuh-states-vulnerabilities')['hits']['hits'] + # Only alerts for affected vulnerabilities are expected vuln_by_agent_index_second_scan[agent] = agent_all_vulnerabilities test_result['evidences']['vulnerabilities_index_second_scan'] = vuln_by_agent_index_second_scan # Calculate differences between first and second scan - agent_not_found_in_first_scan = list(set(vuln_by_agent_index_second_scan.keys()) - set(results['vulnerabilities_index_first_scan'].keys())) - agent_not_found_in_second_scan = list(set(results['vulnerabilities_index_first_scan'].keys()) - set(vuln_by_agent_index_second_scan.keys())) + agent_not_found_in_first_scan = (list(set(vuln_by_agent_index_second_scan.keys()) - + set(results['vulnerabilities_index_first_scan'].keys()))) + agent_not_found_in_second_scan = (list(set(results['vulnerabilities_index_first_scan'].keys()) - + set(vuln_by_agent_index_second_scan.keys()))) - agent_found_in_all_scans = set(vuln_by_agent_index_second_scan.keys()) & set(results['vulnerabilities_index_first_scan'].keys()) + agent_found_in_all_scans = (set(vuln_by_agent_index_second_scan.keys()) & + set(results['vulnerabilities_index_first_scan'].keys())) 
vulnerabilities_not_found_in_first_scan = {} vulnerabilities_not_found_in_second_scan = {} for agent in agent_found_in_all_scans: vulnerabilities_second_scan = get_vulnerabilities_from_states(vuln_by_agent_index_second_scan[agent]) - vulnerabilities_first_scan = get_vulnerabilities_from_states(results['vulnerabilities_index_first_scan'][agent]) - + vulnerabilities_first_scan = get_vulnerabilities_from_states( + results['vulnerabilities_index_first_scan'][agent]) + # Calculate differences between first and second scan - vulnerabilities_not_found_second_scan = list(set(vulnerabilities_first_scan) - set(vulnerabilities_second_scan)) - vulnerabilities_not_found_first_scan = list(set(vulnerabilities_second_scan) - set(vulnerabilities_first_scan)) + vulnerabilities_not_found_second_scan = (list(set(vulnerabilities_first_scan) - + set(vulnerabilities_second_scan))) + vulnerabilities_not_found_first_scan = (list(set(vulnerabilities_second_scan) - + set(vulnerabilities_first_scan))) # Change to dict to be able to serialize vulnerabilities_not_found_first_scan = [vuln._asdict() for vuln in vulnerabilities_not_found_first_scan] @@ -498,8 +530,9 @@ def tests_syscollector_first_second_scan_consistency_index(self, request, host_m if len(vulnerabilities_not_found_first_scan) > 0: vulnerabilities_not_found_in_first_scan[agent] = vulnerabilities_not_found_first_scan - # Check if agents are the same in both scans - if len(agent_found_in_all_scans) != len(vuln_by_agent_index_second_scan) != len(results['vulnerabilities_index_first_scan']): + # Check if agents are the same in both scans + if (len(agent_found_in_all_scans) != len(vuln_by_agent_index_second_scan) != + len(results['vulnerabilities_index_first_scan'])): test_result['checks']['all_successfull'] = False logging.critical("Inconsistencies found between first and second scan in the index. 
Different agents found") if len(agent_not_found_in_first_scan) > 0: @@ -517,13 +550,15 @@ def tests_syscollector_first_second_scan_consistency_index(self, request, host_m test_result['evidences']['vulnerabilities_not_found_in_first_scan'] = vulnerabilities_not_found_in_first_scan if vulnerabilities_not_found_in_second_scan: - logging.critical(f"Vulnerabilities not found in second scan: {vulnerabilities_not_found_in_second_scan}") + logging.critical("Vulnerabilities not found in second scan: " + f"{vulnerabilities_not_found_in_second_scan}") test_result['evidences']['vulnerabilities_not_found_in_second_scan'] = vulnerabilities_not_found_in_second_scan - + results[test_name] = test_result if not test_result['checks']['all_successfull']: - logging_message = "Inconsistencies found between first and second scan in the index. Check evidences for more information" + logging_message = "Inconsistencies found between first and second scan in the index. " \ + "Check evidences for more information" logger.critical(logging_message) pytest.fail(logging_message) @@ -555,8 +590,59 @@ def get_results(self): return self.results @pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids) - def test_vulnerability_detector_scans_cases(self, setup_vulnerability_tests, request, preconditions, body, teardown, setup, - host_manager, get_results): + def test_vulnerability_detector_scans_cases(self, setup_vulnerability_tests, request, preconditions, body, teardown, + setup, host_manager, get_results): + """ + description: Validates the Vulnerability Detector's ability to detect new vulnerabilities in the environment for each of the defined cases. + + This test evaluates the effectiveness of the Vulnerability Detector in real-world scenarios, focusing on the installation, removal, + or upgrade of various vulnerable and non-vulnerable packages in the environment. It ensures that all agents generate the expected + vulnerabilities and associated alerts. 
+ + tier: 0 + + parameters: + - setup_vulnerability_tests: + type: fixture + brief: Setup the environment to proceed with the testing + - request: pytest request object + - preconditions: + type: fixture + brief: The preconditions within the test cases, if any + - body: + type: fixture + brief: The body of the test case, which contains the tasks to be executed + - teardown: + type: fixture + brief: The teardown within the test cases, if any + - setup: + type: fixture + brief: Test setup results, to check if the hosts are setup correctly + - host_manager: + type: fixture + brief: Get the host manager of the environment + - get_results: fixture to get the results of global class tests + + assertions: + - Verify that all the hosts are properly setup. + - Verify whether vulnerabilities remain, appear or disappear, and whether alerts appear. + + cases: + - install_package + - remove_package + - upgrade_package_maintain_vulnerability + - upgrade_package_maintain_add_vulnerability + - upgrade_package_remove_vulnerability + - upgrade_package_nonvulnerable_to_nonvulnerable + - upgrade_package_nonvulnerable_to_vulnerable + - install_package_non_vulnerable + - remove_non_vulnerable_packge + + tags: + - syscollector + - vulnerability_detector + """ + test_name = request.node.name setup_results = setup diff --git a/tests/performance/test_api/test_api_endpoints_performance.py b/tests/performance/test_api/test_api_endpoints_performance.py index b74d01dda4..d5475b75bc 100755 --- a/tests/performance/test_api/test_api_endpoints_performance.py +++ b/tests/performance/test_api/test_api_endpoints_performance.py @@ -14,9 +14,6 @@ api_details = dict() xfailed_items = { - '/active-response': {'message': 'Agent simulator not handling active-response messages: ' - 'https://github.com/wazuh/wazuh-qa/issues/1266', - 'method': 'put'}, '/agents/group': {'message': 'Investigate performance issues with PUT /agents/group API endpoint: ' 'https://github.com/wazuh/wazuh/issues/13872', 'method': 
'put'}, diff --git a/tests/system/test_cluster/test_agent_enrollment/test_agent_enrollment.py b/tests/system/test_cluster/test_agent_enrollment/test_agent_enrollment.py index 2cb61ca7a5..7fc9b2b1bf 100644 --- a/tests/system/test_cluster/test_agent_enrollment/test_agent_enrollment.py +++ b/tests/system/test_cluster/test_agent_enrollment/test_agent_enrollment.py @@ -26,11 +26,13 @@ # Remove the agent once the test has finished @pytest.fixture(scope='module') def clean_environment(): + yield + + host_manager.control_service(host='wazuh-agent1', service='wazuh', state="stopped") agent_id = host_manager.run_command('wazuh-master', f'cut -c 1-3 {WAZUH_PATH}/etc/client.keys') host_manager.get_host('wazuh-master').ansible("command", f'{WAZUH_PATH}/bin/manage_agents -r {agent_id}', check=False) - host_manager.control_service(host='wazuh-agent1', service='wazuh', state="stopped") host_manager.clear_file(host='wazuh-agent1', file_path=os.path.join(WAZUH_PATH, 'etc', 'client.keys')) host_manager.clear_file(host='wazuh-agent1', file_path=os.path.join(WAZUH_LOGS_PATH, 'ossec.log')) @@ -47,7 +49,7 @@ def test_agent_enrollment(clean_environment): # Start the agent enrollment process by restarting the wazuh-agent host_manager.control_service(host='wazuh-master', service='wazuh', state="restarted") host_manager.control_service(host='wazuh-worker1', service='wazuh', state="restarted") - host_manager.get_host('wazuh-agent1').ansible('command', f'service wazuh-agent restart', check=False) + host_manager.control_service(host='wazuh-agent1', service='wazuh', state="restarted") # Run the callback checks for the ossec.log and the cluster.log HostMonitor(inventory_path=inventory_path, diff --git a/tests/system/test_cluster/test_agent_groups/data/guess_group_messages_master.yaml b/tests/system/test_cluster/test_agent_groups/data/guess_group_messages_master.yaml index d7a718a22b..ebd971cabc 100644 --- a/tests/system/test_cluster/test_agent_groups/data/guess_group_messages_master.yaml +++ 
b/tests/system/test_cluster/test_agent_groups/data/guess_group_messages_master.yaml @@ -1,10 +1,10 @@ wazuh-master: - regex: .*Agent 'AGENT_ID' with file 'merged.mg' MD5 .* path: var/ossec/logs/ossec.log - timeout: 30 + timeout: 60 - regex: ".*Group assigned: 'GROUP_ID'" path: /var/ossec/logs/ossec.log - timeout: 10 + timeout: 60 - regex: .*Agent 'AGENT_ID' group is 'GROUP_ID' path: /var/ossec/logs/ossec.log - timeout: 10 + timeout: 60 diff --git a/tests/system/test_cluster/test_agent_groups/data/guess_group_messages_worker.yaml b/tests/system/test_cluster/test_agent_groups/data/guess_group_messages_worker.yaml index 096bb776d1..a354ee9817 100644 --- a/tests/system/test_cluster/test_agent_groups/data/guess_group_messages_worker.yaml +++ b/tests/system/test_cluster/test_agent_groups/data/guess_group_messages_worker.yaml @@ -2,17 +2,17 @@ wazuh-worker1: - regex: ".*Sending message to master node: '{\"daemon_name\":\"remoted\",\"message\":{\"command\":\"assigngroup\",\ \"parameters\":{\"agent\":\"AGENT_ID\",\"md5\":.*" path: /var/ossec/logs/ossec.log - timeout: 30 + timeout: 60 - regex: ".*Message received from master node: '{\"error\":0,\"message\":\"ok\",\"data\":{\"group\":\"GROUP_ID\"}}'" path: /var/ossec/logs/ossec.log - timeout: 10 + timeout: 60 - regex: .*Agent 'AGENT_ID' group is 'GROUP_ID' path: /var/ossec/logs/ossec.log - timeout: 10 + timeout: 60 wazuh-master: - regex: .*Agent 'AGENT_ID' with file 'merged.mg' MD5 .* path: /var/ossec/logs/ossec.log - timeout: 30 + timeout: 60 - regex: ".*Group assigned: 'GROUP_ID'" path: /var/ossec/logs/ossec.log - timeout: 10 + timeout: 60 diff --git a/tests/system/test_cluster/test_agent_groups/test_assign_agent_to_a_group.py b/tests/system/test_cluster/test_agent_groups/test_assign_agent_to_a_group.py index 390bbc4525..c5e64523f6 100644 --- a/tests/system/test_cluster/test_agent_groups/test_assign_agent_to_a_group.py +++ b/tests/system/test_cluster/test_agent_groups/test_assign_agent_to_a_group.py @@ -66,7 +66,7 @@ # 
Variables test_group = 'group_test' -timeout = 25 +timeout = 40 # Tests @@ -107,12 +107,15 @@ def test_assign_agent_to_a_group(agent_target, initial_status, clean_environment restart_cluster(test_infra_agents, host_manager) time.sleep(timeout) + # Check that agent status is active in cluster check_agent_status(agent_id, agent_name, agent_ip, AGENT_STATUS_ACTIVE, host_manager, test_infra_managers) if (initial_status == AGENT_STATUS_DISCONNECTED): host_manager.control_service(host='wazuh-agent1', service=WAZUH_SERVICE_PREFIX, state=WAZUH_SERVICES_STOPPED) + time.sleep(timeout) + check_agent_status(agent_id, agent_name, agent_ip, AGENT_STATUS_DISCONNECTED, host_manager, test_infra_managers) try: @@ -123,6 +126,7 @@ def test_assign_agent_to_a_group(agent_target, initial_status, clean_environment assign_agent_to_new_group('wazuh-master', test_group, agent_id, host_manager) time.sleep(timeout) + # Check that agent has group set to group_test on Managers check_agent_groups(agent_id, test_group, test_infra_managers, host_manager) diff --git a/tests/system/test_cluster/test_agent_groups/test_assign_agent_to_a_group_api.py b/tests/system/test_cluster/test_agent_groups/test_assign_agent_to_a_group_api.py index 0567cb03fe..8b59711001 100644 --- a/tests/system/test_cluster/test_agent_groups/test_assign_agent_to_a_group_api.py +++ b/tests/system/test_cluster/test_agent_groups/test_assign_agent_to_a_group_api.py @@ -63,7 +63,7 @@ tmp_path = os.path.join(local_path, 'tmp') # Variables -timeout = 10 +timeout = 30 test_group = 'group_test' diff --git a/tests/system/test_cluster/test_agent_groups/test_assign_groups_guess.py b/tests/system/test_cluster/test_agent_groups/test_assign_groups_guess.py index b59db3ea1a..1afb93165a 100644 --- a/tests/system/test_cluster/test_agent_groups/test_assign_groups_guess.py +++ b/tests/system/test_cluster/test_agent_groups/test_assign_groups_guess.py @@ -276,6 +276,7 @@ def test_guess_multigroups(n_agents, target_node, status_guess_agent_group, clea 
# Run the callback checks for the ossec.log messages_path = master_messages_path if target_node == 'wazuh-master' else worker_messages_path replace_regex_in_file(['AGENT_ID', 'GROUP_ID'], [agent1_id, expected_group], messages_path) + HostMonitor(inventory_path=inventory_path, messages_path=messages_path, tmp_path=tmp_path).run(update_position=True) diff --git a/tests/system/test_cluster/test_agent_groups/test_group_hash.py b/tests/system/test_cluster/test_agent_groups/test_group_hash.py index be985cbd02..9b670805ff 100644 --- a/tests/system/test_cluster/test_agent_groups/test_group_hash.py +++ b/tests/system/test_cluster/test_agent_groups/test_group_hash.py @@ -107,13 +107,13 @@ def test_group_hash(target_node, group, n_agents, configure_groups, clean_enviro # Restart agent restart_cluster(test_infra_agents, host_manager) - time.sleep(fw.T_10) + time.sleep(fw.T_20) # Assing group for multigroups case for agent in range(n_agents): if group != 'default': assign_agent_to_new_group(test_infra_managers[0], group, agents_data[agent][1], host_manager) - time.sleep(fw.T_10) + time.sleep(fw.T_20) # Calculate global hash expected_global_hash = calculate_global_hash(test_infra_managers[0], host_manager) @@ -129,7 +129,7 @@ def test_group_hash(target_node, group, n_agents, configure_groups, clean_enviro # Unassign one agent from group unassign_agent_from_group(test_infra_managers[0], group, agents_data[0][1], host_manager) - time.sleep(fw.T_10) + time.sleep(fw.T_20) # Calculate global hash expected_global_hash = calculate_global_hash(test_infra_managers[0], host_manager) diff --git a/tests/system/test_cluster/test_agent_groups/test_group_sync_status.py b/tests/system/test_cluster/test_agent_groups/test_group_sync_status.py index 51e3edd7a7..07878fb677 100644 --- a/tests/system/test_cluster/test_agent_groups/test_group_sync_status.py +++ b/tests/system/test_cluster/test_agent_groups/test_group_sync_status.py @@ -59,7 +59,7 @@ '..', '..', 'provisioning', 'enrollment_cluster', 
'roles', 'agent-role', 'files', 'ossec.conf') t1_configuration_parameters, t1_configuration_metadata, t1_case_ids = get_test_cases_data(test_cases_yaml) - +TIMEOUT_SECOND_CHECK = 10 @pytest.fixture() def group_creation_and_assignation(metadata, target_node): @@ -141,34 +141,40 @@ def test_group_sync_status(metadata, target_node, clean_environment, group_creat cluster recreates groups without syncreq status. ''' # Delete group folder + delete_agent_group(metadata['delete_target'], metadata['group_folder_deleted'], host_manager, 'folder') # Set values first_time_check = 'synced' second_time_check = '' - # Check each 0.25 seconds/10 seconds sync_status + # Check each 0.10 seconds/10 seconds sync_status for _ in range(T_10): - time.sleep(T_025) - agent1_status = json.loads(execute_wdb_query(query, test_infra_hosts[0], host_manager))[1]['group_sync_status'] - agent2_status = json.loads(execute_wdb_query(query, test_infra_hosts[0], host_manager))[2]['group_sync_status'] + status_info = json.loads(execute_wdb_query(query, test_infra_hosts[0], host_manager))[1:3] + agent1_status = status_info[0]['group_sync_status'] + agent2_status = status_info[1]['group_sync_status'] if metadata['agent_in_group'] == 'agent1': - if 'syncreq' == agent1_status and 'synced' == agent2_status: + if agent1_status == 'syncreq' and agent2_status == 'synced': first_time_check = "syncreq" + break elif metadata['agent_in_group'] == 'agent2': - if 'synced' == agent1_status and 'syncreq' == agent2_status: + if agent1_status == 'synced' and agent2_status == 'syncreq': first_time_check = "syncreq" + break else: if agent1_status == 'syncreq' and agent2_status == 'syncreq': first_time_check = 'syncreq' + break - time.sleep(T_5) + time.sleep(0.10) assert metadata['expected_first_check'] == first_time_check + time.sleep(TIMEOUT_SECOND_CHECK) + # Check after 5 seconds, sync_status if 'syncreq' in execute_wdb_query(query, test_infra_hosts[0], host_manager): second_time_check = 'syncreq' diff --git 
a/tests/system/test_cluster/test_agent_groups/test_remove_group.py b/tests/system/test_cluster/test_agent_groups/test_remove_group.py index 8e8810df37..44b8188107 100644 --- a/tests/system/test_cluster/test_agent_groups/test_remove_group.py +++ b/tests/system/test_cluster/test_agent_groups/test_remove_group.py @@ -29,7 +29,7 @@ # Variables t1_configuration_parameters, t1_configuration_metadata, t1_case_ids = get_test_cases_data(t1_cases_path) queries = ['sql select `group` from agent;', 'sql select name from `group`;', 'sql select id_group from belongs;'] - +TIMEOUT_GET_GROUPS_ID = 3 # Fixtures @pytest.fixture() @@ -112,6 +112,8 @@ def test_remove_group(metadata, group, target_node, pre_configured_groups, clean messages_path=messages_path, tmp_path=tmp_path).run(update_position=True) + sleep(TIMEOUT_GET_GROUPS_ID) + for manager in test_infra_managers: group_ids[manager] = str(get_group_id(group, manager, host_manager))