diff --git a/.github/workflows/clean_stale_branches.yml b/.github/workflows/clean_stale_branches.yml index 197595a1ff1e..c07ffc962d16 100644 --- a/.github/workflows/clean_stale_branches.yml +++ b/.github/workflows/clean_stale_branches.yml @@ -11,6 +11,7 @@ env: jobs: stale: runs-on: ubuntu-latest + if: github.repository == 'demisto/content' && github.event.pull_request.head.repo.fork == false steps: - name: Checkout uses: actions/checkout@v4 diff --git a/.gitlab/ci/.gitlab-ci.on-push.yml b/.gitlab/ci/.gitlab-ci.on-push.yml index 04bc1a6b3e88..29cbeeefbe2a 100644 --- a/.gitlab/ci/.gitlab-ci.on-push.yml +++ b/.gitlab/ci/.gitlab-ci.on-push.yml @@ -168,6 +168,21 @@ validate-content-conf: - cp "./Tests/conf.json" "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" - section_end "Copy conf.json To Server Type Artifacts Folder" + - section_start "Find dependencies changes" --collapsed + - | + if [[ -z $BUCKET_UPLOAD || $TEST_UPLOAD == "false" ]]; then + source ./Tests/scripts/get_previous_master_sha.sh + if [[ -z $PREVIOUS_MASTER_SHA ]]; then + echo "WARNING: failed to detect previous master SHA, skipping find dependencies changes" + else + echo "Finding pack dependencies diff against $PREVIOUS_MASTER_SHA" + python Tests/scripts/find_pack_dependencies_changes.py --gitlab-token $GITLAB_API_TOKEN --master-sha $PREVIOUS_MASTER_SHA --job-name $CI_JOB_NAME --artifacts-folder "$ARTIFACTS_FOLDER_SERVER_TYPE" + fi + else + echo "Test upload flow - skipping find dependencies changes" + fi + - section_end "Find dependencies changes" + - section_start "Replace Cortex XSOAR" --collapsed - | if [[ $MARKETPLACE_VERSION == "marketplacev2" || $MARKETPLACE_VERSION == "xpanse" ]]; diff --git a/.hooks/bootstrap b/.hooks/bootstrap index 4e87e458e277..86d36d97846a 100755 --- a/.hooks/bootstrap +++ b/.hooks/bootstrap @@ -72,7 +72,7 @@ else if [[ "${should_install_poetry}" == "yes" ]]; then echo "Installing Poetry version:${POETRY_VERSION}" curl -sSL https://install.python-poetry.org | python3 - --version "${POETRY_VERSION}" - exit_on_error $? "Failed to install Poetry version:${POETRY_VERSION}" + error_code=$? if ! command -v poetry >/dev/null 2>&1; then exit_on_error $? 
"Poetry isn't installed" fi @@ -81,6 +81,10 @@ else else exit_on_error 1 "Poetry version $(poetry --version) doesn't match the required version: ${POETRY_VERSION}" fi + if [ -n "${ARTIFACTS_FOLDER}" ] && [ "${error_code}" -ne 0 ]; then + cp ${PWD}/poetry-installer-error-*.log "${ARTIFACTS_FOLDER}" + fi + exit_on_error $error_code "Failed to install Poetry version:${POETRY_VERSION}" fi fi diff --git a/.pre-commit-config_template.yaml b/.pre-commit-config_template.yaml index 1f232d9e6337..8bff4bdedb7f 100644 --- a/.pre-commit-config_template.yaml +++ b/.pre-commit-config_template.yaml @@ -289,7 +289,7 @@ repos: - decorator==5.1.1 ; python_version >= "3.8" and python_version < "3.11" - defusedxml==0.7.1 ; python_version >= "3.8" and python_version < "3.11" - demisto-py==3.2.13 ; python_version >= "3.8" and python_version < "3.11" - - demisto-sdk==1.26.0 ; python_version >= "3.8" and python_version < "3.11" + - demisto-sdk==1.26.1 ; python_version >= "3.8" and python_version < "3.11" - dictdiffer==0.9.0 ; python_version >= "3.8" and python_version < "3.11" - dictor==0.1.12 ; python_version >= "3.8" and python_version < "3.11" - distlib==0.3.7 ; python_version >= "3.8" and python_version < "3.11" diff --git a/Packs/AWS-SecurityLake/.pack-ignore b/Packs/AWS-SecurityLake/.pack-ignore new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/Packs/AWS-SecurityLake/.secrets-ignore b/Packs/AWS-SecurityLake/.secrets-ignore new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.py b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.py new file mode 100644 index 000000000000..4f19ffca41d6 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.py @@ -0,0 +1,512 @@ +import demistomock as demisto +from CommonServerPython import * +from CommonServerUserPython import * + +from datetime import datetime + +AWS_SERVICE_NAME = "athena" +AWS_SERVICE_NAME_LAKE = "securitylake" +QUERY_DATA_OUTPUTS_KEY = "Query" + + +def parse_rows_response(rows_data: list[dict]) -> list[dict]: + """ + Parse and arrange the 'Rows' data from the response. + + Args: + rows_data (list[dict]): The 'Rows' data from the response. + + Note: + The 'Rows' data is returned in a table format, where each item in the list is a row. + Example for such a response can be seen on 'test_data/raw_data_mock/get_query_results.json' + + Returns: + list[dict]: The data in a parsed and arranged format. + """ + if not rows_data or not rows_data[0].get("Data"): + return [] + + keys: list[str] = [item["VarCharValue"] for item in rows_data[0]["Data"]] + raw_results = [item["Data"] for item in rows_data[1:]] + result_data = [] + + for raw_result in raw_results: + current_item_data = {} + + for idx, value in enumerate(raw_result): + if "VarCharValue" in value: + current_item_data[keys[idx]] = value["VarCharValue"] + + result_data.append(current_item_data) + + return result_data + + +def determine_client_service_name(command: str): + """determines the needed client service name based on the command. + + Args: + command (str): command name being called. + + Returns: + _type_: service name based on the command. 
+ """ + if command in ["aws-security-lake-data-sources-list", "aws-security-lake-data-lakes-list"]: + return AWS_SERVICE_NAME_LAKE + else: + return AWS_SERVICE_NAME + + +def next_token_output_dict(outputs_prefix: str, next_token: str | None, page_outputs: Any, page_outputs_key: str): + """Creates a dict for CommandResults.output with the next token.""" + outputs = { + f"AWS.SecurityLake.{outputs_prefix}(val.{page_outputs_key} && val.{page_outputs_key} == obj.{page_outputs_key})": page_outputs, # noqa: E501 + "AWS.SecurityLake(true)": {f"{outputs_prefix}NextToken": next_token}, + } + + return remove_empty_elements(outputs) + + +def parse_table_metadata(table_metadata_list: list): + """Formats dates in the table metadata from the response. + + Args: + table_metadata_list (list): the raw metadata returned from API. + """ + for metadata in table_metadata_list: + if create_time := metadata.get("CreateTime"): + metadata["CreateTime"] = create_time.strftime("%Y-%m-%d %H:%M:%S") + if last_access_time := metadata.get("LastAccessTime"): + metadata["LastAccessTime"] = last_access_time.strftime("%Y-%m-%d %H:%M:%S") + + +# --- API Call Functions --- # + + +def start_query_execution( + client, + query_string: str, + query_limit: int | None = None, + client_request_token: str | None = None, + database: str | None = None, + output_location: str | None = None, + encryption_option: str | None = None, + kms_key: str | None = None, + work_group: str | None = None, +) -> dict: + if query_limit and "LIMIT" not in query_string: + query_string = f"{query_string} LIMIT {query_limit}" + + kwargs: dict[str, Any] = {"QueryString": query_string} + + if client_request_token: + kwargs.update({"ClientRequestToken": client_request_token}) + + if database: + kwargs.update({"QueryExecutionContext": {"Database": database}}) + + if output_location: + kwargs.update({"ResultConfiguration": {"OutputLocation": output_location}}) + + if encryption_option: + kwargs.update({"ResultConfiguration": {"EncryptionConfiguration": {"EncryptionOption": encryption_option}}}) + + if kms_key: + kwargs.update({"ResultConfiguration": {"EncryptionConfiguration": {"KmsKey": kms_key}}}) + + if work_group: + kwargs.update({"WorkGroup": work_group}) + + return client.start_query_execution(**kwargs) + + +def get_query_execution(client, query_execution_id: str) -> dict: + response = client.get_query_execution(QueryExecutionId=query_execution_id) + + # Convert datetime objects to strings + if (datetime_value := response.get("QueryExecution", {}).get("Status", {}).get("SubmissionDateTime")) and isinstance( + datetime_value, datetime + ): + response["QueryExecution"]["Status"]["SubmissionDateTime"] = datetime_value.isoformat() + + if (datetime_value := response.get("QueryExecution", {}).get("Status", {}).get("CompletionDateTime")) and isinstance( + datetime_value, datetime + ): + response["QueryExecution"]["Status"]["CompletionDateTime"] = datetime_value.isoformat() + + return response["QueryExecution"] + + +def get_query_results(client, query_execution_id: str) -> list[dict]: + raw_response = client.get_query_results(QueryExecutionId=query_execution_id) + parsed_response = parse_rows_response(rows_data=raw_response["ResultSet"]["Rows"]) + + for result_item in parsed_response: + result_item["query_execution_id"] = query_execution_id + + return parsed_response + + +# --- Command Functions --- # + + +def module_test_command(client) -> str | CommandResults: + response = client.list_named_queries() + if response["ResponseMetadata"]["HTTPStatusCode"] == 200: + 
return "ok" + + else: + raise DemistoException(f"Error: {response}") + + +@polling_function( + name=demisto.command(), + interval=arg_to_number(demisto.args().get("interval_in_seconds", 10)), + timeout=arg_to_number(demisto.args().get("timeout_in_seconds", 300)), + requires_polling_arg=False, +) +def execute_query_command(args: dict, query_results_context_key: str, client): + if "QueryExecutionId" not in args: + start_query_response = start_query_execution( + client=client, + query_string=args["query_string"], + query_limit=args.get("query_limit"), + client_request_token=args.get("client_request_token"), + database=args.get("database"), + output_location=args.get("output_location"), + encryption_option=args.get("encryption_option"), + kms_key=args.get("kms_key"), + work_group=args.get("work_group"), + ) + query_execution_id = start_query_response["QueryExecutionId"] + + else: + query_execution_id = args["QueryExecutionId"] + + query_execution_response = get_query_execution(client=client, query_execution_id=query_execution_id) + query_state = query_execution_response["Status"]["State"] + + if query_state in ("QUEUED", "RUNNING"): + args["QueryExecutionId"] = query_execution_id + + return PollResult( + response=None, + continue_to_poll=True, + args_for_next_run=args, + partial_result=CommandResults(readable_output=f"Query is still running. Current state: '{query_state}'."), + ) + + output_data: dict[str, Any] = {f"AWS.SecurityLake.{QUERY_DATA_OUTPUTS_KEY}": query_execution_response} + readable_output = None + + if query_state == "SUCCEEDED": + query_results_response = get_query_results(client=client, query_execution_id=query_execution_id) + output_data[f"AWS.SecurityLake.{query_results_context_key}"] = query_results_response + readable_output = tableToMarkdown("AWS Athena Query Results", query_results_response) + + elif query_state == "CANCELLED": + readable_output = f"Query '{query_execution_id}' has been cancelled." + + elif query_state == "FAILED": + readable_output = f"Query '{query_execution_id}' has failed." + demisto.debug(str(query_execution_response)) + if query_execution_response["Status"].get("AthenaError", {}).get("ErrorMessage"): + error_message = query_execution_response["Status"]["AthenaError"]["ErrorMessage"] + readable_output += f"\nError: {error_message}" + + return PollResult( + response=CommandResults( + outputs=output_data, + raw_response=output_data, + readable_output=readable_output, + ), + continue_to_poll=False, + ) + + +def list_catalogs_command(client, args: dict): + """Lists the data catalogs in the current Amazon Web Services account. + + Args: + client : aws client object + args (dict): command argument - nextToken, limit, workGroup + """ + + args_to_request = { + "NextToken": args.get("next_token"), + "MaxResults": arg_to_number(args.get("limit")), + "WorkGroup": args.get("work_group"), + } + + response = client.list_data_catalogs(**remove_empty_elements(args_to_request)) + + catalogs = response.get("DataCatalogsSummary") + next_token = response.get("NextToken") + context_output = next_token_output_dict("Catalog", next_token, catalogs, "CatalogName") + + return CommandResults( + outputs=context_output, + raw_response=response, + readable_output=tableToMarkdown( + "AWS Security Lake Catalogs", response.get("DataCatalogsSummary"), headerTransform=pascalToSpace, removeNull=True + ), + ) + + +def list_databases_command(client, args: dict): + """Lists the databases in the specified data catalog. 
+def list_databases_command(client, args: dict):
+    """Lists the databases in the specified data catalog.
+
+    Args:
+        client : AWS client object.
+        args (dict): Command arguments.
+    """
+    args_to_request = {
+        "NextToken": args.get("next_token"),
+        "MaxResults": arg_to_number(args.get("limit")),
+        "WorkGroup": args.get("work_group"),
+        "CatalogName": args.get("catalog_name"),
+    }
+
+    response = client.list_databases(**remove_empty_elements(args_to_request))
+
+    databases = response.get("DatabaseList")
+    next_token = response.get("NextToken")
+    context_output = next_token_output_dict("Database", next_token, databases, "Name")
+
+    return CommandResults(
+        outputs=context_output,
+        raw_response=response,
+        readable_output=tableToMarkdown(
+            "AWS Security Lake Databases",
+            databases,
+            headers=["Name"],
+            headerTransform=pascalToSpace,
+            removeNull=True,
+        ),
+    )
+
+
+def list_table_metadata_command(client, args: dict):
+    """Lists the metadata for the tables in the specified data catalog database.
+
+    Args:
+        client : AWS client object.
+        args (dict): Command arguments.
+    """
+
+    args_to_request = {
+        "NextToken": args.get("next_token"),
+        "MaxResults": arg_to_number(args.get("limit")),
+        "WorkGroup": args.get("work_group"),
+        "CatalogName": args.get("catalog_name"),
+        "DatabaseName": args.get("database_name"),
+        "Expression": args.get("expression"),
+    }
+
+    response = client.list_table_metadata(**remove_empty_elements(args_to_request))
+
+    metadata_list = response.get("TableMetadataList") or []
+    parse_table_metadata(metadata_list)
+
+    next_token = response.get("NextToken")
+    context_output = next_token_output_dict("TableMetadata", next_token, metadata_list, "Name")
+
+    return CommandResults(
+        outputs=context_output,
+        raw_response=response,
+        readable_output=tableToMarkdown(
+            "AWS Security Lake Table Metadata",
+            metadata_list,
+            headers=["Name", "TableType", "Columns", "PartitionKeys"],
+            headerTransform=pascalToSpace,
+            removeNull=True,
+        ),
+    )
+
+
+def mfalogin_query_command(client, args: dict):
+    """Runs the aws-security-lake-query-execute command with query_string:
+    SELECT * FROM <{database}>.<{table}>
+    WHERE CAST(actor.user.name AS VARCHAR) = '{user_name}';
+
+    Args:
+        client : AWS client object.
+        args (dict): Command arguments.
+    """
+    database = args.get("database")
+    table = args.get("table")
+    user_name = args.get("user_name")
+    args["query_string"] = f"SELECT * FROM {database}.{table} WHERE CAST(actor.user.name AS VARCHAR) = '{user_name}';"
+    return execute_query_command(client=client, args=args, query_results_context_key="MfaLoginQueryResults")
+
+
+def source_ip_query_command(client, args: dict):
+    """Runs the aws-security-lake-query-execute command with query_string:
+    SELECT * FROM <{database}>.<{table}>
+    WHERE CAST(src_endpoint.ip AS VARCHAR) = '{ip_src}';
+
+    Args:
+        client : AWS client object.
+        args (dict): Command arguments.
+    """
+    database = args.get("database")
+    table = args.get("table")
+    ip_src = args.get("ip_src")
+    args["query_string"] = f"SELECT * FROM {database}.{table} WHERE CAST(src_endpoint.ip AS VARCHAR) = '{ip_src}';"
+    return execute_query_command(client=client, args=args, query_results_context_key="SourceIPQueryResults")
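+
+# Illustrative sketch (assumed argument values): the query helpers above only differ
+# in the WHERE clause they build before delegating to execute_query_command. For
+# example, source_ip_query_command with database="amazon_security_lake_glue_db",
+# table="cloud_trail_mgmt_1_0" and ip_src="1.2.3.4" produces:
+#
+#     SELECT * FROM amazon_security_lake_glue_db.cloud_trail_mgmt_1_0
+#     WHERE CAST(src_endpoint.ip AS VARCHAR) = '1.2.3.4';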
f"SELECT * FROM {database}.{table} WHERE severity = '{severity}';" + return execute_query_command(client=client, args=args, query_results_context_key="GuardDutyActivityQueryResults") + + +def list_sources_command(client, args: dict): + """Retrieves a snapshot of the current Region. + + Args: + client : aws client object + args (dict): command argument + """ + args_to_request = { + "accounts": argToList(args.get("accounts")), + "maxResults": arg_to_number(args.get("limit")), + "nextToken": args.get("next_token"), + } + + response = client.get_data_lake_sources(**remove_empty_elements(args_to_request)) + + next_token = response.get("nextToken") + outputs = { + "AWS.SecurityLake.DataLakeSource.DataLakeArn": response.get("dataLakeArn"), + "AWS.SecurityLake.DataLakeSource.DataLakeSources": response.get("dataLakeSources"), + "AWS.SecurityLake(true)": {"DataLakeSourceNextToken": next_token}, + } + + return CommandResults( + outputs=remove_empty_elements(outputs), + raw_response=response, + readable_output=tableToMarkdown( + "AWS Security Lake Catalogs", + response.get("dataLakeSources"), + headers=["account", "sourceName"], + headerTransform=pascalToSpace, + removeNull=True, + ), + ) + + +def list_data_lakes_command(client, args: dict): + """Retrieves the Amazon Security Lake configuration object for the specified Amazon Web Services Regions. + + Args: + client : aws client object + args (dict): command argument + """ + + response = client.list_data_lakes(regions=argToList(args.get("regions"))) + outputs = remove_empty_elements(response.get("dataLakes")) + return CommandResults( + outputs_prefix="AWS.SecurityLake.DataLake", + outputs_key_field="dataLakeArn", + outputs=outputs, + raw_response=response, + readable_output=tableToMarkdown("AWS Security Lake Data Lakes", outputs, headerTransform=pascalToSpace), + ) + + +def main(): # pragma: no cover + params = demisto.params() + args = demisto.args() + command = demisto.command() + + aws_role_arn = params.get("roleArn") + aws_role_session_name = params.get("roleSessionName") + aws_default_region = params.get("defaultRegion") + aws_role_session_duration = params.get("sessionDuration") + aws_access_key_id = demisto.get(params, "credentials.identifier") + aws_secret_access_key = demisto.get(params, "credentials.password") + verify_certificate = not params.get("insecure", True) + timeout = params.get("timeout") + retries = params.get("retries", 5) + + validate_params(aws_default_region, aws_role_arn, aws_role_session_name, aws_access_key_id, aws_secret_access_key) + + try: + demisto.debug(f"Command being called is '{command}'.") + + aws_client = AWSClient( + aws_default_region=aws_default_region, + aws_role_arn=aws_role_arn, + aws_role_session_name=aws_role_session_name, + aws_role_session_duration=aws_role_session_duration, + aws_role_policy=None, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + verify_certificate=verify_certificate, + timeout=timeout, + retries=retries, + ) + + service = determine_client_service_name(command=command) + + client = aws_client.aws_session( + service=service, + region=args.get("region"), + role_arn=args.get("roleArn"), + role_session_name=args.get("roleSessionName"), + role_session_duration=args.get("roleSessionDuration"), + ) + + result: str | CommandResults + + if command == "test-module": + result = module_test_command(client) + + elif command == "aws-security-lake-query-execute": + result = execute_query_command(client=client, args=args, query_results_context_key="QueryResults") # 
diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.yml b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.yml
new file mode 100644
index 000000000000..388884a3dfca
--- /dev/null
+++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake.yml
@@ -0,0 +1,1210 @@
+category: IT Services
+commonfields:
+  id: AWS Security Lake
+  version: -1
+configuration:
+- name: roleArn
+  display: Role ARN
+  required: false
+  type: 0
+  section: Connect
+- name: roleSessionName
+  display: Role Session Name
+  required: false
+  type: 0
+  section: Connect
+- name: sessionDuration
+  display: Role Session Duration
+  required: false
+  type: 0
+  section: Connect
+  advanced: true
+- name: defaultRegion
+  display: AWS Default Region
+  required: false
+  type: 15
+  options:
+  - us-east-1
+  - us-east-2
+  - us-west-1
+  - us-west-2
+  - ca-central-1
+  - eu-west-1
+  - eu-central-1
+  - eu-west-2
+  - ap-northeast-1
+  - ap-northeast-2
+  - ap-southeast-1
+  - ap-southeast-2
+  - ap-south-1
+  - sa-east-1
+  - eu-north-1
+  - eu-west-3
+  - us-gov-east-1
+  - us-gov-west-1
+  section: Connect
+- name: credentials
+  display: Access Key
+  required: false
+  type: 9
+  displaypassword: Secret Key
+  hiddenusername: false
+  section: Connect
+- name: timeout
+  display: Timeout
+  required: false
+  defaultvalue: 60,10
+  type: 0
+  section: Connect
+  advanced: true
+  additionalinfo: The time in seconds until a timeout exception is reached. You can specify just the read timeout (for example 60), or the read timeout followed by the connect timeout, separated by a comma (for example 60,10). If a connect timeout is not specified, a default of 10 seconds will be used.
+- name: retries
+  display: Retries
+  required: false
+  defaultvalue: 5
+  type: 0
+  section: Connect
+  advanced: true
+  additionalinfo: "The maximum number of retry attempts when connection or throttling errors are encountered. Set to 0 to disable retries. The default value is 5 and the limit is 10. Note: Increasing the number of retries will increase the execution time."
+- name: insecure + display: Trust any certificate (not secure) + required: false + defaultvalue: 'false' + type: 8 + advanced: true +- name: proxy + display: Use system proxy settings + required: false + defaultvalue: 'false' + type: 8 + advanced: true +description: "Amazon Security Lake is a fully managed security data lake service." +display: AWS-SecurityLake +name: AWS Security Lake +script: + commands: + - name: aws-security-lake-query-execute + arguments: + - name: query_string + description: The SQL query statements to be executed. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: query_limit + description: A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. + required: false + isArray: false + defaultValue: "50" + predefined: + - "" + - name: client_request_token + description: A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). If another StartQueryExecution request is received, the same response is returned and another query is not created. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: database + description: The name of the database. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: output_location + description: The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: encryption_option + description: Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: kms_key + description: For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: work_group + description: The name of the workgroup in which the query is being started. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleArn + description: The Amazon Resource Name (ARN) of the role to assume. + - name: roleSessionName + description: An identifier for the assumed role session. + - name: roleSessionDuration + description: The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. + - name: region + description: The AWS region. If not specified, the default region will be used. + - name: QueryExecutionId + hidden: true + description: ID of the newly created query. Used internally for polling. + - name: hide_polling_output + hidden: true + description: "" + description: Execute a new query, wait for the query to complete (using polling), and return query's execution information, and query's results (if successful). Either 'OutputLocation' or 'WorkGroup' must be specified for the query to run. + polling: true + outputs: + - contextPath: AWS.Athena.Query.QueryExecutionId + description: The unique identifier for each query execution. + type: String + - contextPath: AWS.Athena.Query.Query + description: The SQL query statements which the query execution ran. + type: String + - contextPath: AWS.Athena.Query.StatementType + description: The type of query statement that was run. 
+ type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.OutputLocation + description: The location in Amazon S3 where your query and calculation results are stored, such as 's3://path/to/query/bucket/'. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.EncryptionOption + description: If query and calculation results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE_KMS or CSE_KMS) and key information. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.KmsKey + description: For SSE_KMS and CSE_KMS, this is the KMS key ARN or ID. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.ExpectedBucketOwner + description: The Amazon Web Services account ID that you expect to be the owner of the Amazon S3 bucket specified by ResultConfiguration.OutputLocation. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.AclConfiguration.S3AclOption + description: The Amazon S3 canned ACL that Athena should specify when storing query results. + type: String + - contextPath: AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.Enabled + description: True if previous query results can be reused when the query is run; otherwise, false. The default is false. + type: Boolean + - contextPath: AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.MaxAgeInMinutes + description: Specifies, in minutes, the maximum age of a previous query result that Athena should consider for reuse. The default is 60. + type: Number + - contextPath: AWS.Athena.Query.QueryExecutionContext.Database + description: The name of the database used in the query execution. + type: String + - contextPath: AWS.Athena.Query.QueryExecutionContext.Catalog + description: The name of the data catalog used in the query execution. + type: String + - contextPath: AWS.Athena.Query.Status.State + description: The state of the query execution. + type: String + - contextPath: AWS.Athena.Query.Status.StateChangeReason + description: Further detail about the status of the query. + type: String + - contextPath: AWS.Athena.Query.Status.SubmissionDateTime + description: The date and time that the query was submitted. + type: String + - contextPath: AWS.Athena.Query.Status.CompletionDateTime + description: The date and time that the query completed. + type: String + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorCategory + description: An integer value that specifies the category of a query failure error. + type: Number + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorType + description: An integer value that provides specific information about an Athena query error. For the meaning of specific values, see the Error Type Reference in the Amazon Athena User Guide. + type: Number + - contextPath: AWS.Athena.Query.Status.AthenaError.Retryable + description: True if the query might succeed if resubmitted. + type: Boolean + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorMessage + description: Contains a short description of the error that occurred. + type: String + - contextPath: AWS.Athena.Query.Statistics.EngineExecutionTimeInMillis + description: The number of milliseconds that the query took to execute. + type: Number + - contextPath: AWS.Athena.Query.Statistics.DataScannedInBytes + description: The number of bytes in the data that was queried. 
+ type: Number + - contextPath: AWS.Athena.Query.Statistics.DataManifestLocation + description: The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. + type: String + - contextPath: AWS.Athena.Query.Statistics.TotalExecutionTimeInMillis + description: The number of milliseconds that Athena took to run the query. + type: Number + - contextPath: AWS.Athena.Query.Statistics.QueryQueueTimeInMillis + description: The number of milliseconds that the query was in your query queue waiting for resources. + type: Number + - contextPath: AWS.Athena.Query.Statistics.ServicePreProcessingTimeInMillis + description: The number of milliseconds that Athena took to preprocess the query before submitting the query to the query engine. + type: Number + - contextPath: AWS.Athena.Query.Statistics.QueryPlanningTimeInMillis + description: The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. + type: Number + - contextPath: AWS.Athena.Query.Statistics.ServiceProcessingTimeInMillis + description: The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query. + type: Number + - contextPath: AWS.Athena.Query.ResultReuseInformation.ReusedPreviousResult + description: True if a previous query result was reused; false if the result was generated from a new run of the query. + type: Boolean + - contextPath: AWS.Athena.Query.WorkGroup + description: The name of the workgroup in which the query ran. + type: String + - contextPath: AWS.Athena.Query.EngineVersion.SelectedEngineVersion + description: The engine version requested by the user. Possible values are determined by the output of ListEngineVersions, including AUTO. + type: String + - contextPath: AWS.Athena.Query.EngineVersion.EffectiveEngineVersion + description: The engine version on which the query runs. + type: String + - contextPath: AWS.Athena.Query.ExecutionParameters + description: A list of values for the parameters in a query. The values are applied sequentially to the parameters in the query in the order in which the parameters occur. The list of parameters is not returned in the response. + type: List + - contextPath: AWS.Athena.Query.SubstatementType + description: The type of query statement that was run. + type: String + - contextPath: AWS.Athena.QueryResults + description: List of query results. + type: List + - name: aws-security-lake-data-catalogs-list + description: Lists the data catalogs in the current Amazon Web Services account. + deprecated: false + arguments: + - name: work_group + description: The name of the workgroup. Required if making an IAM Identity Center request. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: region + description: The AWS region. If not specified, the default region will be used. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleArn + description: The Amazon Resource Name (ARN) of the role to assume. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionName + description: An identifier for the assumed role session. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionDuration + description: The duration, in seconds, of the role session. 
The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: limit
+      description: Specifies the maximum number of data catalogs to return.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: next_token
+      description: A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    outputs:
+    - contextPath: AWS.SecurityLake.Catalog.CatalogName
+      description: The name of the data catalog.
+      type: String
+    - contextPath: AWS.SecurityLake.Catalog.Type
+      description: The data catalog type.
+      type: String
+    - contextPath: AWS.SecurityLake.CatalogNextToken
+      description: A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call.
+      type: String
+  - name: aws-security-lake-databases-list
+    description: Lists the databases in the specified data catalog.
+    deprecated: false
+    arguments:
+    - name: catalog_name
+      description: The name of the data catalog that contains the databases to return.
+      required: true
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: work_group
+      description: The name of the workgroup for which the metadata is being fetched. Required if requesting an IAM Identity Center enabled Glue Data Catalog.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: region
+      description: The AWS region. If not specified, the default region will be used.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: roleArn
+      description: The Amazon Resource Name (ARN) of the role to assume.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: roleSessionName
+      description: An identifier for the assumed role session.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: roleSessionDuration
+      description: The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: limit
+      description: Specifies the maximum number of results to return.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: next_token
+      description: A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    outputs:
+    - contextPath: AWS.SecurityLake.Database.Name
+      description: The name of the database.
+      type: String
+    - contextPath: AWS.SecurityLake.Database.Description
+      description: An optional description of the database.
+      type: String
+    - contextPath: AWS.SecurityLake.Database.Parameters
+      description: A set of custom key/value pairs.
+      type: List
+    - contextPath: AWS.SecurityLake.DatabaseNextToken
+      description: A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call.
+      type: String
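+# Illustrative usage sketch (assumed catalog and token values): page through results
+# by feeding the returned token back into next_token, e.g. in the War Room:
+#
+#   !aws-security-lake-databases-list catalog_name=AwsDataCatalog limit=50
+#   !aws-security-lake-databases-list catalog_name=AwsDataCatalog next_token=${AWS.SecurityLake.DatabaseNextToken}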
+  - name: aws-security-lake-table-metadata-list
+    description: Lists the metadata for the tables in the specified data catalog database.
+    deprecated: false
+    arguments:
+    - name: catalog_name
+      description: The name of the data catalog that contains the databases to return.
+      required: true
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: database_name
+      description: The name of the database for which table metadata should be returned.
+      required: true
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: expression
+      description: A regex filter that pattern-matches table names. If no expression is supplied, metadata for all tables is listed.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: roleArn
+      description: The Amazon Resource Name (ARN) of the role to assume.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: roleSessionName
+      description: An identifier for the assumed role session.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: roleSessionDuration
+      description: The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: limit
+      description: Specifies the maximum number of results to return.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: next_token
+      description: A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    - name: work_group
+      description: The name of the workgroup for which the metadata is being fetched. Required if requesting an IAM Identity Center enabled Glue Data Catalog.
+      required: false
+      isArray: false
+      defaultValue: ""
+      predefined:
+      - ""
+    outputs:
+    - contextPath: AWS.SecurityLake.TableMetadata.Name
+      description: The name of the table.
+      type: String
+    - contextPath: AWS.SecurityLake.TableMetadata.CreateTime
+      description: The time that the table was created.
+      type: Date
+    - contextPath: AWS.SecurityLake.TableMetadata.LastAccessTime
+      description: The last time the table was accessed.
+      type: Date
+    - contextPath: AWS.SecurityLake.TableMetadata.TableType
+      description: The type of table. In Athena, only EXTERNAL_TABLE is supported.
+      type: String
+    - contextPath: AWS.SecurityLake.TableMetadata.Columns.Name
+      description: The name of the column.
+      type: String
+    - contextPath: AWS.SecurityLake.TableMetadata.Columns.Type
+      description: The data type of the column.
+      type: String
+    - contextPath: AWS.SecurityLake.TableMetadata.Columns.Comment
+      description: Optional information about the column.
+      type: String
+    - contextPath: AWS.SecurityLake.TableMetadata.PartitionKeys.Name
+      description: The name of the column.
+      type: String
+    - contextPath: AWS.SecurityLake.TableMetadata.PartitionKeys.Type
+      description: The data type of the column.
+      type: String
+    - contextPath: AWS.SecurityLake.TableMetadata.PartitionKeys.Comment
+      description: Optional information about the column.
+      type: String
+    - contextPath: AWS.SecurityLake.TableMetadata.Parameters
+      description: A set of custom key/value pairs for table properties.
+ type: List + - contextPath: AWS.SecurityLake.TableMetadataNextToken + description: A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. + type: String + - name: aws-security-lake-user-mfalogin-query + description: Runs query that takes a provided username and queries the AWS Security Lake for MFA login attempts (Success/Failed) associated with the user's account, using AWS CloudTrail logs. + deprecated: false + arguments: + - name: database + description: The database to run the query against. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: table + description: The table to run the query against. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: user_name + description: The username to search for MFA login attempts. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: output_location + description: The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleArn + description: The Amazon Resource Name (ARN) of the role to assume. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionName + description: An identifier for the assumed role session. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionDuration + description: The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: region + description: The AWS region. If not specified, the default region will be used. + - name: query_limit + description: A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: QueryExecutionId + hidden: true + description: ID of the newly created query. Used internally for polling. + - name: hide_polling_output + hidden: true + description: "" + polling: true + outputs: + - contextPath: AWS.Athena.Query.QueryExecutionId + description: The unique identifier for each query execution. + type: String + - contextPath: AWS.Athena.Query.Query + description: The SQL query statements which the query execution ran. + type: String + - contextPath: AWS.Athena.Query.StatementType + description: The type of query statement that was run. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.OutputLocation + description: The location in Amazon S3 where your query and calculation results are stored, such as 's3://path/to/query/bucket/'. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.EncryptionOption + description: If query and calculation results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE_KMS or CSE_KMS) and key information. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.KmsKey + description: For SSE_KMS and CSE_KMS, this is the KMS key ARN or ID. 
+ type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.ExpectedBucketOwner + description: The Amazon Web Services account ID that you expect to be the owner of the Amazon S3 bucket specified by ResultConfiguration.OutputLocation. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.AclConfiguration.S3AclOption + description: The Amazon S3 canned ACL that Athena should specify when storing query results. + type: String + - contextPath: AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.Enabled + description: True if previous query results can be reused when the query is run; otherwise, false. The default is false. + type: Boolean + - contextPath: AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.MaxAgeInMinutes + description: Specifies, in minutes, the maximum age of a previous query result that Athena should consider for reuse. The default is 60. + type: Number + - contextPath: AWS.Athena.Query.QueryExecutionContext.Database + description: The name of the database used in the query execution. + type: String + - contextPath: AWS.Athena.Query.QueryExecutionContext.Catalog + description: The name of the data catalog used in the query execution. + type: String + - contextPath: AWS.Athena.Query.Status.State + description: The state of the query execution. + type: String + - contextPath: AWS.Athena.Query.Status.StateChangeReason + description: Further detail about the status of the query. + type: String + - contextPath: AWS.Athena.Query.Status.SubmissionDateTime + description: The date and time that the query was submitted. + type: String + - contextPath: AWS.Athena.Query.Status.CompletionDateTime + description: The date and time that the query completed. + type: String + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorCategory + description: An integer value that specifies the category of a query failure error. + type: Number + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorType + description: An integer value that provides specific information about an Athena query error. For the meaning of specific values, see the Error Type Reference in the Amazon Athena User Guide. + type: Number + - contextPath: AWS.Athena.Query.Status.AthenaError.Retryable + description: True if the query might succeed if resubmitted. + type: Boolean + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorMessage + description: Contains a short description of the error that occurred. + type: String + - contextPath: AWS.Athena.Query.Statistics.EngineExecutionTimeInMillis + description: The number of milliseconds that the query took to execute. + type: Number + - contextPath: AWS.Athena.Query.Statistics.DataScannedInBytes + description: The number of bytes in the data that was queried. + type: Number + - contextPath: AWS.Athena.Query.Statistics.DataManifestLocation + description: The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. + type: String + - contextPath: AWS.Athena.Query.Statistics.TotalExecutionTimeInMillis + description: The number of milliseconds that Athena took to run the query. + type: Number + - contextPath: AWS.Athena.Query.Statistics.QueryQueueTimeInMillis + description: The number of milliseconds that the query was in your query queue waiting for resources. 
+ type: Number + - contextPath: AWS.Athena.Query.Statistics.ServicePreProcessingTimeInMillis + description: The number of milliseconds that Athena took to preprocess the query before submitting the query to the query engine. + type: Number + - contextPath: AWS.Athena.Query.Statistics.QueryPlanningTimeInMillis + description: The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. + type: Number + - contextPath: AWS.Athena.Query.Statistics.ServiceProcessingTimeInMillis + description: The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query. + type: Number + - contextPath: AWS.Athena.Query.ResultReuseInformation.ReusedPreviousResult + description: True if a previous query result was reused; false if the result was generated from a new run of the query. + type: Boolean + - contextPath: AWS.Athena.Query.WorkGroup + description: The name of the workgroup in which the query ran. + type: String + - contextPath: AWS.Athena.Query.EngineVersion.SelectedEngineVersion + description: The engine version requested by the user. Possible values are determined by the output of ListEngineVersions, including AUTO. + type: String + - contextPath: AWS.Athena.Query.EngineVersion.EffectiveEngineVersion + description: The engine version on which the query runs. + type: String + - contextPath: AWS.Athena.Query.ExecutionParameters + description: A list of values for the parameters in a query. The values are applied sequentially to the parameters in the query in the order in which the parameters occur. The list of parameters is not returned in the response. + type: List + - contextPath: AWS.Athena.Query.SubstatementType + description: The type of query statement that was run. + type: String + - contextPath: AWS.Athena.MfaLoginQueryResults + description: List of query results. + type: List + - name: aws-security-lake-source-ip-query + description: Runs a query that takes a provided source IP address and queries the AWS Security Lake for console login attempts (Success/Failed) associated with the IP address, using AWS CloudTrail logs. + deprecated: false + arguments: + - name: database + description: The database to run the query against. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: table + description: The table to run the query against. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: ip_src + description: The source IP address to search for console login attempts. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: output_location + description: The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleArn + description: The Amazon Resource Name (ARN) of the role to assume. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionName + description: An identifier for the assumed role session. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionDuration + description: The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. 
+ required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: region + description: The AWS region. If not specified, the default region will be used. + - name: query_limit + description: A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: QueryExecutionId + hidden: true + description: ID of the newly created query. Used internally for polling. + - name: hide_polling_output + hidden: true + description: "" + polling: true + outputs: + - contextPath: AWS.Athena.Query.QueryExecutionId + description: The unique identifier for each query execution. + type: String + - contextPath: AWS.Athena.Query.Query + description: The SQL query statements which the query execution ran. + type: String + - contextPath: AWS.Athena.Query.StatementType + description: The type of query statement that was run. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.OutputLocation + description: The location in Amazon S3 where your query and calculation results are stored, such as 's3://path/to/query/bucket/'. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.EncryptionOption + description: If query and calculation results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE_KMS or CSE_KMS) and key information. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.KmsKey + description: For SSE_KMS and CSE_KMS, this is the KMS key ARN or ID. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.ExpectedBucketOwner + description: The Amazon Web Services account ID that you expect to be the owner of the Amazon S3 bucket specified by ResultConfiguration.OutputLocation. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.AclConfiguration.S3AclOption + description: The Amazon S3 canned ACL that Athena should specify when storing query results. + type: String + - contextPath: AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.Enabled + description: True if previous query results can be reused when the query is run; otherwise, false. The default is false. + type: Boolean + - contextPath: AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.MaxAgeInMinutes + description: Specifies, in minutes, the maximum age of a previous query result that Athena should consider for reuse. The default is 60. + type: Number + - contextPath: AWS.Athena.Query.QueryExecutionContext.Database + description: The name of the database used in the query execution. + type: String + - contextPath: AWS.Athena.Query.QueryExecutionContext.Catalog + description: The name of the data catalog used in the query execution. + type: String + - contextPath: AWS.Athena.Query.Status.State + description: The state of the query execution. + type: String + - contextPath: AWS.Athena.Query.Status.StateChangeReason + description: Further detail about the status of the query. + type: String + - contextPath: AWS.Athena.Query.Status.SubmissionDateTime + description: The date and time that the query was submitted. + type: String + - contextPath: AWS.Athena.Query.Status.CompletionDateTime + description: The date and time that the query completed. 
+ type: String + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorCategory + description: An integer value that specifies the category of a query failure error. + type: Number + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorType + description: An integer value that provides specific information about an Athena query error. For the meaning of specific values, see the Error Type Reference in the Amazon Athena User Guide. + type: Number + - contextPath: AWS.Athena.Query.Status.AthenaError.Retryable + description: True if the query might succeed if resubmitted. + type: Boolean + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorMessage + description: Contains a short description of the error that occurred. + type: String + - contextPath: AWS.Athena.Query.Statistics.EngineExecutionTimeInMillis + description: The number of milliseconds that the query took to execute. + type: Number + - contextPath: AWS.Athena.Query.Statistics.DataScannedInBytes + description: The number of bytes in the data that was queried. + type: Number + - contextPath: AWS.Athena.Query.Statistics.DataManifestLocation + description: The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. + type: String + - contextPath: AWS.Athena.Query.Statistics.TotalExecutionTimeInMillis + description: The number of milliseconds that Athena took to run the query. + type: Number + - contextPath: AWS.Athena.Query.Statistics.QueryQueueTimeInMillis + description: The number of milliseconds that the query was in your query queue waiting for resources. + type: Number + - contextPath: AWS.Athena.Query.Statistics.ServicePreProcessingTimeInMillis + description: The number of milliseconds that Athena took to preprocess the query before submitting the query to the query engine. + type: Number + - contextPath: AWS.Athena.Query.Statistics.QueryPlanningTimeInMillis + description: The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. + type: Number + - contextPath: AWS.Athena.Query.Statistics.ServiceProcessingTimeInMillis + description: The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query. + type: Number + - contextPath: AWS.Athena.Query.ResultReuseInformation.ReusedPreviousResult + description: True if a previous query result was reused; false if the result was generated from a new run of the query. + type: Boolean + - contextPath: AWS.Athena.Query.WorkGroup + description: The name of the workgroup in which the query ran. + type: String + - contextPath: AWS.Athena.Query.EngineVersion.SelectedEngineVersion + description: The engine version requested by the user. Possible values are determined by the output of ListEngineVersions, including AUTO. + type: String + - contextPath: AWS.Athena.Query.EngineVersion.EffectiveEngineVersion + description: The engine version on which the query runs. + type: String + - contextPath: AWS.Athena.Query.ExecutionParameters + description: A list of values for the parameters in a query. The values are applied sequentially to the parameters in the query in the order in which the parameters occur. The list of parameters is not returned in the response. + type: List + - contextPath: AWS.Athena.Query.SubstatementType + description: The type of query statement that was run. 
+ type: String + - contextPath: AWS.Athena.SourceIPQueryResults + description: List of query results. + type: List + - name: aws-security-lake-guardduty-activity-query + description: Search GuardDuty logs for activity of a specified severity level. + deprecated: false + arguments: + - name: database + description: The database to run the query against. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: table + description: The table to run the query against. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: severity + description: The severity of related events to search for. + required: true + isArray: false + defaultValue: "" + auto: PREDEFINED + predefined: + - Unknown + - Informational + - Low + - Medium + - High + - Critical + - Fatal + - Other + - name: output_location + description: The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. + required: true + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleArn + description: The Amazon Resource Name (ARN) of the role to assume. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionName + description: An identifier for the assumed role session. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionDuration + description: The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: region + description: The AWS region. If not specified, the default region will be used. + - name: query_limit + description: A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: QueryExecutionId + hidden: true + description: ID of the newly created query. Used internally for polling. + - name: hide_polling_output + hidden: true + description: "" + polling: true + outputs: + - contextPath: AWS.Athena.Query.QueryExecutionId + description: The unique identifier for each query execution. + type: String + - contextPath: AWS.Athena.Query.Query + description: The SQL query statements which the query execution ran. + type: String + - contextPath: AWS.Athena.Query.StatementType + description: The type of query statement that was run. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.OutputLocation + description: The location in Amazon S3 where your query and calculation results are stored, such as 's3://path/to/query/bucket/'. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.EncryptionOption + description: If query and calculation results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE_KMS or CSE_KMS) and key information. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.KmsKey + description: For SSE_KMS and CSE_KMS, this is the KMS key ARN or ID. + type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.ExpectedBucketOwner + description: The Amazon Web Services account ID that you expect to be the owner of the Amazon S3 bucket specified by ResultConfiguration.OutputLocation.
+ type: String + - contextPath: AWS.Athena.Query.ResultConfiguration.AclConfiguration.S3AclOption + description: The Amazon S3 canned ACL that Athena should specify when storing query results. + type: String + - contextPath: AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.Enabled + description: True if previous query results can be reused when the query is run; otherwise, false. The default is false. + type: Boolean + - contextPath: AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.MaxAgeInMinutes + description: Specifies, in minutes, the maximum age of a previous query result that Athena should consider for reuse. The default is 60. + type: Number + - contextPath: AWS.Athena.Query.QueryExecutionContext.Database + description: The name of the database used in the query execution. + type: String + - contextPath: AWS.Athena.Query.QueryExecutionContext.Catalog + description: The name of the data catalog used in the query execution. + type: String + - contextPath: AWS.Athena.Query.Status.State + description: The state of the query execution. + type: String + - contextPath: AWS.Athena.Query.Status.StateChangeReason + description: Further detail about the status of the query. + type: String + - contextPath: AWS.Athena.Query.Status.SubmissionDateTime + description: The date and time that the query was submitted. + type: String + - contextPath: AWS.Athena.Query.Status.CompletionDateTime + description: The date and time that the query completed. + type: String + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorCategory + description: An integer value that specifies the category of a query failure error. + type: Number + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorType + description: An integer value that provides specific information about an Athena query error. For the meaning of specific values, see the Error Type Reference in the Amazon Athena User Guide. + type: Number + - contextPath: AWS.Athena.Query.Status.AthenaError.Retryable + description: True if the query might succeed if resubmitted. + type: Boolean + - contextPath: AWS.Athena.Query.Status.AthenaError.ErrorMessage + description: Contains a short description of the error that occurred. + type: String + - contextPath: AWS.Athena.Query.Statistics.EngineExecutionTimeInMillis + description: The number of milliseconds that the query took to execute. + type: Number + - contextPath: AWS.Athena.Query.Statistics.DataScannedInBytes + description: The number of bytes in the data that was queried. + type: Number + - contextPath: AWS.Athena.Query.Statistics.DataManifestLocation + description: The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. + type: String + - contextPath: AWS.Athena.Query.Statistics.TotalExecutionTimeInMillis + description: The number of milliseconds that Athena took to run the query. + type: Number + - contextPath: AWS.Athena.Query.Statistics.QueryQueueTimeInMillis + description: The number of milliseconds that the query was in your query queue waiting for resources. + type: Number + - contextPath: AWS.Athena.Query.Statistics.ServicePreProcessingTimeInMillis + description: The number of milliseconds that Athena took to preprocess the query before submitting the query to the query engine. + type: Number + - contextPath: AWS.Athena.Query.Statistics.QueryPlanningTimeInMillis + description: The number of milliseconds that Athena took to plan the query processing flow. 
This includes the time spent retrieving table partitions from the data source. + type: Number + - contextPath: AWS.Athena.Query.Statistics.ServiceProcessingTimeInMillis + description: The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query. + type: Number + - contextPath: AWS.Athena.Query.ResultReuseInformation.ReusedPreviousResult + description: True if a previous query result was reused; false if the result was generated from a new run of the query. + type: Boolean + - contextPath: AWS.Athena.Query.WorkGroup + description: The name of the workgroup in which the query ran. + type: String + - contextPath: AWS.Athena.Query.EngineVersion.SelectedEngineVersion + description: The engine version requested by the user. Possible values are determined by the output of ListEngineVersions, including AUTO. + type: String + - contextPath: AWS.Athena.Query.EngineVersion.EffectiveEngineVersion + description: The engine version on which the query runs. + type: String + - contextPath: AWS.Athena.Query.ExecutionParameters + description: A list of values for the parameters in a query. The values are applied sequentially to the parameters in the query in the order in which the parameters occur. The list of parameters is not returned in the response. + type: List + - contextPath: AWS.Athena.Query.SubstatementType + description: The type of query statement that was run. + type: String + - contextPath: AWS.Athena.GuardDutyActivityQueryResults + description: List of query results. + type: List + - name: aws-security-lake-data-sources-list + description: Retrieves a snapshot of the current region, including whether Amazon Security Lake is enabled for those accounts and which sources Security Lake is collecting data from. In order to run this command the user must have 'securitylake' permissions. + deprecated: false + arguments: + - name: accounts + description: The Amazon Web Services account ID for which a static snapshot of the current Amazon Web Services Region, including enabled accounts and log sources, is retrieved. + required: false + isArray: true + defaultValue: "" + predefined: + - "" + - name: limit + description: The maximum limit of accounts for which the static snapshot of the current region, including enabled accounts and log sources, is retrieved. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: next_token + description: Lists if there are more results available. The value of nextToken is a unique pagination token for each page. Repeat the call using the returned token to retrieve the next page. Keep all other arguments unchanged. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleArn + description: The Amazon Resource Name (ARN) of the role to assume. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionName + description: An identifier for the assumed role session. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionDuration + description: The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: region + description: The AWS region. If not specified, the default region will be used. + - name: query_limit + description: A limit (number) to use for the query. 
If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + outputs: + - contextPath: AWS.SecurityLake.DataLakeSource.DataLakeArn + description: The Amazon Resource Name (ARN) created by you to provide to the subscriber. + type: String + - contextPath: AWS.SecurityLake.DataLakeSource.DataLakeSources.account + description: The ID of the Security Lake account for which logs are collected. + type: String + - contextPath: AWS.SecurityLake.DataLakeSource.DataLakeSources.eventClasses + description: The Open Cybersecurity Schema Framework (OCSF) event classes which describes the type of data that the custom source will send to Security Lake. + type: List + - contextPath: AWS.SecurityLake.DataLakeSource.DataLakeSources.sourceName + description: The supported Amazon Web Services from which logs and events are collected. Amazon Security Lake supports log and event collection for natively supported Amazon Web Services. + type: String + - contextPath: AWS.SecurityLake.DataLakeSource.DataLakeSources.sourceStatuses.resource + description: Defines the path in which the stored logs are available which has information on your systems, applications, and services. + type: String + - contextPath: AWS.SecurityLake.DataLakeSource.DataLakeSources.sourceStatuses.status + description: The health status of services, including error codes and patterns. + type: String + - contextPath: AWS.SecurityLake.DataLakeSourceNextToken + description: Lists if there are more results available. The value of nextToken is a unique pagination token for each page. Repeat the call using the returned token to retrieve the next page. Keep all other arguments unchanged. + type: String + - name: aws-security-lake-data-lakes-list + description: Retrieves the Amazon Security Lake configuration object for the specified Amazon Web Services Regions. In order to run this command the user must have 'securitylake' permissions. + deprecated: false + arguments: + - name: regions + description: The list of regions where Security Lake is enabled. + required: false + isArray: true + defaultValue: "" + predefined: + - "" + - name: roleArn + description: The Amazon Resource Name (ARN) of the role to assume. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionName + description: An identifier for the assumed role session. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: roleSessionDuration + description: The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + - name: region + description: The AWS region. If not specified, the default region will be used. + - name: query_limit + description: A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. + required: false + isArray: false + defaultValue: "" + predefined: + - "" + outputs: + - contextPath: AWS.SecurityLake.createStatus + description: Retrieves the status of the configuration operation for an account in Amazon Security Lake. + type: String + - contextPath: AWS.SecurityLake.dataLakeArn + description: The Amazon Resource Name (ARN) created by you to provide to the subscriber. 
+ type: String + - contextPath: AWS.SecurityLake.encryptionConfiguration.kmsKeyId + description: The ID of the KMS encryption key used by Amazon Security Lake to encrypt the Security Lake object. + type: String + - contextPath: AWS.SecurityLake.lifecycleConfiguration.expiration.days + description: Number of days before data expires in the Amazon Security Lake object. + type: Number + - contextPath: AWS.SecurityLake.lifecycleConfiguration.transitions.days + description: Number of days before data transitions to a different S3 Storage Class in the Amazon Security Lake object. + type: Number + - contextPath: AWS.SecurityLake.lifecycleConfiguration.transitions.storageClass + description: The range of storage classes that you can choose from based on the data access, resiliency, and cost requirements of your workloads. + type: String + - contextPath: AWS.SecurityLake.region + description: The Amazon Web Services regions where Security Lake is enabled. + type: String + - contextPath: AWS.SecurityLake.replicationConfiguration.regions + description: Replication enables automatic, asynchronous copying of objects across Amazon S3 buckets. + type: String + - contextPath: AWS.SecurityLake.replicationConfiguration.roleArn + description: Replication settings for the Amazon S3 buckets. This parameter uses the Identity and Access Management (IAM) role you created that is managed by Security Lake, to ensure the replication setting is correct. + type: String + - contextPath: AWS.SecurityLake.s3BucketArn + description: The ARN for the Amazon Security Lake Amazon S3 bucket. + type: String + - contextPath: AWS.SecurityLake.updateStatus.exception.code + description: The reason code for the exception of the last UpdateDataLake or DeleteDataLake API request. + type: String + - contextPath: AWS.SecurityLake.updateStatus.exception.reason + description: The reason for the exception of the last UpdateDataLake or DeleteDataLake API request. + type: String + - contextPath: AWS.SecurityLake.updateStatus.requestId + description: The unique ID for the last UpdateDataLake or DeleteDataLake API request. + type: String + - contextPath: AWS.SecurityLake.updateStatus.status + description: The status of the last UpdateDataLake or DeleteDataLake API request that was requested. + type: String + + runonce: false + script: "-" + type: python + subtype: python3 + dockerimage: demisto/boto3py3:1.0.0.86497 + feed: false + isfetch: false +fromversion: 6.10.0 +tests: +- No tests (auto formatted) diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake_description.md b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake_description.md new file mode 100644 index 000000000000..3cf7bbdc29c4 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake_description.md @@ -0,0 +1,16 @@ +Before you can use the AWS Security Lake integration in XSOAR, you need to perform several configuration steps in your AWS environment. + +### Prerequisites +- Attach an instance profile with the required permissions to the Cortex XSOAR server or engine that is running +on your AWS environment. +- Instance profile requires minimum permission: sts:AssumeRole. +- Instance profile requires permission to assume the roles needed by the AWS integrations. + +### Configure AWS Settings +1. Create an IAM Role for the Instance Profile. +2. Attach a Role to the Instance Profile. +3. Configure the necessary IAM Roles that the AWS integration can assume. 
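+
+As an illustration only, and not part of the required setup: the role assumption these steps enable behaves roughly like the following minimal boto3 sketch. The role ARN and session name below are placeholders, not values used by the integration.
+
+```python
+import boto3
+
+# Assume the IAM role configured for the integration (placeholder ARN and session name).
+sts = boto3.client("sts")
+assumed = sts.assume_role(
+    RoleArn="arn:aws:iam::123456789012:role/ExampleSecurityLakeRole",
+    RoleSessionName="example-xsoar-session",
+    DurationSeconds=900,  # 15 minutes, the minimum allowed session duration
+)
+creds = assumed["Credentials"]
+
+# Use the temporary credentials to create an AWS Security Lake client.
+securitylake = boto3.client(
+    "securitylake",
+    aws_access_key_id=creds["AccessKeyId"],
+    aws_secret_access_key=creds["SecretAccessKey"],
+    aws_session_token=creds["SessionToken"],
+)
+```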
+ +For detailed instructions, see the [AWS Integrations - Authentication](https://xsoar.pan.dev/docs/reference/articles/aws-integrations---authentication). + +Command descriptions, input descriptions, and output descriptions are taken from the Amazon Athena and AWS Security Lake documentation. For more information, see the [Amazon Athena documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/athena.html) or the [AWS Security Lake documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/securitylake.html). diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake_image.png b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake_image.png new file mode 100644 index 000000000000..f70728817955 Binary files /dev/null and b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake_image.png differ diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake_test.py b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake_test.py new file mode 100644 index 000000000000..9e58030bfb9c --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/AWSSecurityLake_test.py @@ -0,0 +1,149 @@ +import pytest +import importlib +import json +from pathlib import Path + +AWSSecurityLake = importlib.import_module("AWSSecurityLake") + + +class MockClient: + def __init__(self, *args, **kwargs): + pass + + def start_query_execution(self, *args, **kwargs): + pass + + def stop_query_execution(self, *args, **kwargs): + pass + + def get_query_execution(self, *args, **kwargs): + pass + + def get_query_results(self, *args, **kwargs): + pass + + def list_data_catalogs(self, *args, **kwargs): + pass + + def list_databases(self, *args, **kwargs): + pass + + def list_table_metadata(self, *args, **kwargs): + pass + + def list_data_lakes(self, *args, **kwargs): + pass + + def get_data_lake_sources(self, *args, **kwargs): + pass + + +def load_test_data(folder: str, file_name: str) -> dict | str: + """ + A function for loading and returning data from test files within the "test_data" folder. + + Args: + folder (str): Name of the parent folder of the file within `test_data`. + file_name (str): Name of the file to load data from. + + Returns: + dict | str: The data loaded from the file. If the file is a JSON file, a dict is returned, otherwise a string.
+ """ + with open(Path("test_data") / folder / file_name) as f: + if file_name.endswith(".json"): + return json.load(f) + + return f.read() + + +def test_execute_query_command(mocker): + client = MockClient() + start_query_execution_mock_data = load_test_data("raw_data_mock", "start_query_execution.json") + mocker.patch.object(client, "start_query_execution", return_value=start_query_execution_mock_data) + get_query_execution_mock_data = load_test_data("raw_data_mock", "get_query_execution.json") + mocker.patch.object(client, "get_query_execution", return_value=get_query_execution_mock_data) + get_query_results_mock_data = load_test_data("raw_data_mock", "get_query_results.json") + mocker.patch.object(client, "get_query_results", return_value=get_query_results_mock_data) + + args = { + "query_string": "SELECT * FROM test_db.test_table", + "output_location": "s3://athena-queries-test", + } + + result = AWSSecurityLake.execute_query_command(args, "QueryResults", client) + + expected_context_execution_details = load_test_data("expected_context", "get_query_execution_command.json") + expected_context_results = load_test_data("expected_context", "get_query_results_command.json") + expected_context = { + "AWS.SecurityLake.Query": expected_context_execution_details, + "AWS.SecurityLake.QueryResults": expected_context_results, + } + assert result.outputs == expected_context + + expected_hr = load_test_data("expected_hr", "get_query_results_command.txt") + assert result.readable_output == expected_hr + + +COMMANDS = [ + (AWSSecurityLake.list_catalogs_command, "list_catalogs_command.json", "list_data_catalogs"), + (AWSSecurityLake.list_databases_command, "list_database_command.json", "list_databases"), + (AWSSecurityLake.list_table_metadata_command, "list_table_metadata_command.json", "list_table_metadata"), + (AWSSecurityLake.list_sources_command, "list_sources_command.json", "get_data_lake_sources"), + (AWSSecurityLake.list_data_lakes_command, "list_data_lakes_command.json", "list_data_lakes"), +] + + +@pytest.mark.parametrize("command, file_name, client_command", COMMANDS) +def test_general_command(mocker, command, file_name, client_command): + """ + Given: argument to command + When: running the relevant command + Then: validate that the correct values are returned. + """ + + client = MockClient() + response = load_test_data("raw_data_mock", file_name) + outputs = load_test_data("expected_context", file_name) + mocker.patch.object(client, client_command, return_value=response) + + result = command(client, {}) + assert result.outputs == outputs + + +QUEYRY_COMMANDS = [ + ( + AWSSecurityLake.mfalogin_query_command, + {"database": "test_db", "table": "test_table", "user_name": "1234"}, + "SELECT * FROM test_db.test_table WHERE CAST(actor.user.name AS VARCHAR) = '1234';", + "MfaLoginQueryResults", + ), + ( + AWSSecurityLake.source_ip_query_command, + {"database": "test_db", "table": "test_table", "ip_src": "1234"}, + "SELECT * FROM test_db.test_table WHERE CAST(src_endpoint.ip AS VARCHAR) = '1234';", + "SourceIPQueryResults", + ), + ( + AWSSecurityLake.guardduty_activity_query_command, + {"database": "test_db", "table": "test_table", "severity": "Critical"}, + "SELECT * FROM test_db.test_table WHERE severity = 'Critical';", + "GuardDutyActivityQueryResults", + ), +] + + +@pytest.mark.parametrize("command, args, query, query_results_context_key", QUEYRY_COMMANDS) +def test_query_creation_commands(mocker, command, args, query, query_results_context_key): + """ + Given: Command arguments. 
+ When: Running a query-generating command. + Then: Validate that correct values are generated when calling execute_query_command. + """ + client = MockClient() + execute_command = mocker.patch.object(AWSSecurityLake, "execute_query_command") + + command(client=client, args=args) + + assert execute_command.called is True + assert execute_command.call_args.kwargs.get("args").get("query_string") == query + assert execute_command.call_args.kwargs.get("query_results_context_key") == query_results_context_key diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/README.md b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/README.md new file mode 100644 index 000000000000..a754c8d496a9 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/README.md @@ -0,0 +1,482 @@ +Amazon Security Lake is a fully managed security data lake service. +This integration was integrated and tested with version 1.34.20 of AWS Security Lake SDK (boto3). + +## Configure AWS-SecurityLake on Cortex XSOAR + +1. Navigate to **Settings** > **Integrations** > **Servers & Services**. +2. Search for AWS-SecurityLake. +3. Click **Add instance** to create and configure a new integration instance. + + | **Parameter** | **Description** | **Required** | + | --- | --- | --- | + | Name | User name | True | + | Role Arn | Role ARN | False | + | Role Session Name | Role Session Name | False | + | Role Session Duration | Role Session Duration | False | + | AWS Default Region | AWS Default Region | False | + | Access Key | Access Key | False | + | Secret Key | Secret Key | False | + | Timeout | The time in seconds until a timeout exception is reached. You can specify just the read timeout (for example 60), or the read timeout followed by the connect timeout, separated by a comma (for example 60,10). If a connect timeout is not specified, a default of 10 seconds will be used. | False | + | Retries | The maximum number of retry attempts when connection or throttling errors are encountered. Set to 0 to disable retries. The default value is 5 and the limit is 10. Note: Increasing the number of retries will increase the execution time. | False | + | Trust any certificate (not secure) | | False | + | Use system proxy settings | | False | + +4. Click **Test** to validate the URLs, token, and connection. + +## Commands + +You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook. +After you successfully execute a command, a DBot message appears in the War Room with the command details. + +### aws-security-lake-query-execute + +*** +Execute a new query, wait for the query to complete (using polling), and return the query's execution information and its results (if successful). Either 'OutputLocation' or 'WorkGroup' must be specified for the query to run. + +#### Base Command + +`aws-security-lake-query-execute` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| query_string | The SQL query statements to be executed. | Required | +| query_limit | A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. Default is 50. | Optional | +| client_request_token | A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). If another StartQueryExecution request is received, the same response is returned and another query is not created. | Optional | +| database | The name of the database.
| Optional | +| output_location | The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. | Optional | +| encryption_option | Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3 ), server-side encryption with KMS-managed keys (SSE-KMS ), or client-side encryption with KMS-managed keys (CSE-KMS) is used. | Optional | +| kms_key | For SSE-KMS and CSE-KMS , this is the KMS key ARN or ID. | Optional | +| work_group | The name of the workgroup in which the query is being started. | Optional | +| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional | +| roleSessionName | An identifier for the assumed role session. | Optional | +| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional | +| region | The AWS region. If not specified, the default region will be used. | Optional | +| QueryExecutionId | ID of the newly created query. Used internally for polling. | Optional | +| hide_polling_output | | Optional | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.Athena.Query.QueryExecutionId | String | The unique identifier for each query execution. | +| AWS.Athena.Query.Query | String | The SQL query statements which the query execution ran. | +| AWS.Athena.Query.StatementType | String | The type of query statement that was run. | +| AWS.Athena.Query.ResultConfiguration.OutputLocation | String | The location in Amazon S3 where your query and calculation results are stored, such as 's3://path/to/query/bucket/'. | +| AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.EncryptionOption | String | If query and calculation results are encrypted in Amazon S3, indicates the encryption option used \(for example, SSE_KMS or CSE_KMS\) and key information. | +| AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.KmsKey | String | For SSE_KMS and CSE_KMS, this is the KMS key ARN or ID. | +| AWS.Athena.Query.ResultConfiguration.ExpectedBucketOwner | String | The Amazon Web Services account ID that you expect to be the owner of the Amazon S3 bucket specified by ResultConfiguration.OutputLocation. | +| AWS.Athena.Query.ResultConfiguration.AclConfiguration.S3AclOption | String | The Amazon S3 canned ACL that Athena should specify when storing query results. | +| AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.Enabled | Boolean | True if previous query results can be reused when the query is run; otherwise, false. The default is false. | +| AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.MaxAgeInMinutes | Number | Specifies, in minutes, the maximum age of a previous query result that Athena should consider for reuse. The default is 60. | +| AWS.Athena.Query.QueryExecutionContext.Database | String | The name of the database used in the query execution. | +| AWS.Athena.Query.QueryExecutionContext.Catalog | String | The name of the data catalog used in the query execution. | +| AWS.Athena.Query.Status.State | String | The state of the query execution. | +| AWS.Athena.Query.Status.StateChangeReason | String | Further detail about the status of the query. | +| AWS.Athena.Query.Status.SubmissionDateTime | String | The date and time that the query was submitted. | +| AWS.Athena.Query.Status.CompletionDateTime | String | The date and time that the query completed. 
| +| AWS.Athena.Query.Status.AthenaError.ErrorCategory | Number | An integer value that specifies the category of a query failure error. | +| AWS.Athena.Query.Status.AthenaError.ErrorType | Number | An integer value that provides specific information about an Athena query error. For the meaning of specific values, see the Error Type Reference in the Amazon Athena User Guide. | +| AWS.Athena.Query.Status.AthenaError.Retryable | Boolean | True if the query might succeed if resubmitted. | +| AWS.Athena.Query.Status.AthenaError.ErrorMessage | String | Contains a short description of the error that occurred. | +| AWS.Athena.Query.Statistics.EngineExecutionTimeInMillis | Number | The number of milliseconds that the query took to execute. | +| AWS.Athena.Query.Statistics.DataScannedInBytes | Number | The number of bytes in the data that was queried. | +| AWS.Athena.Query.Statistics.DataManifestLocation | String | The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. | +| AWS.Athena.Query.Statistics.TotalExecutionTimeInMillis | Number | The number of milliseconds that Athena took to run the query. | +| AWS.Athena.Query.Statistics.QueryQueueTimeInMillis | Number | The number of milliseconds that the query was in your query queue waiting for resources. | +| AWS.Athena.Query.Statistics.ServicePreProcessingTimeInMillis | Number | The number of milliseconds that Athena took to preprocess the query before submitting the query to the query engine. | +| AWS.Athena.Query.Statistics.QueryPlanningTimeInMillis | Number | The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. | +| AWS.Athena.Query.Statistics.ServiceProcessingTimeInMillis | Number | The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query. | +| AWS.Athena.Query.ResultReuseInformation.ReusedPreviousResult | Boolean | True if a previous query result was reused; false if the result was generated from a new run of the query. | +| AWS.Athena.Query.WorkGroup | String | The name of the workgroup in which the query ran. | +| AWS.Athena.Query.EngineVersion.SelectedEngineVersion | String | The engine version requested by the user. Possible values are determined by the output of ListEngineVersions, including AUTO. | +| AWS.Athena.Query.EngineVersion.EffectiveEngineVersion | String | The engine version on which the query runs. | +| AWS.Athena.Query.ExecutionParameters | List | A list of values for the parameters in a query. The values are applied sequentially to the parameters in the query in the order in which the parameters occur. The list of parameters is not returned in the response. | +| AWS.Athena.Query.SubstatementType | String | The type of query statement that was run. | +| AWS.Athena.QueryResults | List | List of query results. | + +### aws-security-lake-data-catalogs-list + +*** +Lists the data catalogs in the current Amazon Web Services account. + +#### Base Command + +`aws-security-lake-data-catalogs-list` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| work_group | The name of the workgroup. Required if making an IAM Identity Center request. | Optional | +| region | The AWS region. If not specified, the default region will be used. | Optional | +| roleArn | The Amazon Resource Name (ARN) of the role to assume. 
| Optional | +| roleSessionName | An identifier for the assumed role session. | Optional | +| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional | +| limit | Specifies the maximum number of data catalogs to return. | Optional | +| next_token | A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. | Optional | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.SecurityLake.Catalog.CatalogName | String | The name of the data catalog. | +| AWS.SecurityLake.Catalog.Type | String | The data catalog type. | +| AWS.SecurityLake.CatalogNextToken | String | A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. | + +### aws-security-lake-databases-list + +*** +Lists the databases in the specified data catalog. + +#### Base Command + +`aws-security-lake-databases-list` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| catalog_name | The name of the data catalog that contains the databases to return. | Required | +| work_group | The name of the workgroup for which the metadata is being fetched. Required if requesting an IAM Identity Center enabled Glue Data Catalog. | Optional | +| region | The AWS region. If not specified, the default region will be used. | Optional | +| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional | +| roleSessionName | An identifier for the assumed role session. | Optional | +| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional | +| limit | Specifies the maximum number of results to return. | Optional | +| next_token | A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. | Optional | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.SecurityLake.Database.Name | String | The name of the database. | +| AWS.SecurityLake.Database.Description | String | An optional description of the database. | +| AWS.SecurityLake.Database.Parameters | List | A set of custom key/value pairs. | +| AWS.SecurityLake.DatabaseNextToken | String | A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. | + +#### Command Example +```!aws-security-lake-databases-list catalog_name=Test``` + +### aws-security-lake-table-metadata-list + +*** +Lists the metadata for the tables in the specified data catalog database. + +#### Base Command + +`aws-security-lake-table-metadata-list` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| catalog_name | The name of the data catalog that contains the database and table metadata to return. | Required | +| database_name | The name of the database for which table metadata should be returned. | Required | +| expression | A regex filter that pattern-matches table names.
If no expression is supplied, metadata for all tables is listed. | Optional | +| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional | +| roleSessionName | An identifier for the assumed role session. | Optional | +| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional | +| limit | Specifies the maximum number of results to return. | Optional | +| next_token | A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. | Optional | +| work_group | The name of the workgroup for which the metadata is being fetched. Required if requesting an IAM Identity Center enabled Glue Data Catalog. | Optional | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.SecurityLake.TableMetadata.Name | String | The name of the table. | +| AWS.SecurityLake.TableMetadata.CreateTime | Date | The time that the table was created. | +| AWS.SecurityLake.TableMetadata.LastAccessTime | Date | The last time the table was accessed. | +| AWS.SecurityLake.TableMetadata.TableType | String | The type of table. In Athena, only EXTERNAL_TABLE is supported. | +| AWS.SecurityLake.TableMetadata.Columns.Name | String | The name of the column. | +| AWS.SecurityLake.TableMetadata.Columns.Type | String | The data type of the column. | +| AWS.SecurityLake.TableMetadata.Columns.Comment | String | Optional information about the column. | +| AWS.SecurityLake.TableMetadata.PartitionKeys.Name | String | The name of the column. | +| AWS.SecurityLake.TableMetadata.PartitionKeys.Type | String | The data type of the column. | +| AWS.SecurityLake.TableMetadata.PartitionKeys.Comment | String | Optional information about the column. | +| AWS.SecurityLake.TableMetadata.Parameters | List | A set of custom key/value pairs for table properties. | +| AWS.SecurityLake.TableMetadataNextToken | String | A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. | + +#### Command Example +```!aws-security-lake-table-metadata-list catalog_name=Test database_name=test``` + +### aws-security-lake-user-mfalogin-query + +*** +Runs a query that takes a provided username and queries AWS Security Lake for MFA login attempts (Success/Failed) associated with the user's account, using AWS CloudTrail logs. + +#### Base Command + +`aws-security-lake-user-mfalogin-query` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| database | The database to run the query against. | Required | +| table | The table to run the query against. | Required | +| user_name | The username to search for MFA login attempts. | Required | +| output_location | The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. | Required | +| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional | +| roleSessionName | An identifier for the assumed role session. | Optional | +| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role.
| Optional | +| region | The AWS region. If not specified, the default region will be used. | Optional | +| query_limit | A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. | Optional | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.Athena.Query.QueryExecutionId | String | The unique identifier for each query execution. | +| AWS.Athena.Query.Query | String | The SQL query statements which the query execution ran. | +| AWS.Athena.Query.StatementType | String | The type of query statement that was run. | +| AWS.Athena.Query.ResultConfiguration.OutputLocation | String | The location in Amazon S3 where your query and calculation results are stored, such as 's3://path/to/query/bucket/'. | +| AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.EncryptionOption | String | If query and calculation results are encrypted in Amazon S3, indicates the encryption option used \(for example, SSE_KMS or CSE_KMS\) and key information. | +| AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.KmsKey | String | For SSE_KMS and CSE_KMS, this is the KMS key ARN or ID. | +| AWS.Athena.Query.ResultConfiguration.ExpectedBucketOwner | String | The Amazon Web Services account ID that you expect to be the owner of the Amazon S3 bucket specified by ResultConfiguration.OutputLocation. | +| AWS.Athena.Query.ResultConfiguration.AclConfiguration.S3AclOption | String | The Amazon S3 canned ACL that Athena should specify when storing query results. | +| AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.Enabled | Boolean | True if previous query results can be reused when the query is run; otherwise, false. The default is false. | +| AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.MaxAgeInMinutes | Number | Specifies, in minutes, the maximum age of a previous query result that Athena should consider for reuse. The default is 60. | +| AWS.Athena.Query.QueryExecutionContext.Database | String | The name of the database used in the query execution. | +| AWS.Athena.Query.QueryExecutionContext.Catalog | String | The name of the data catalog used in the query execution. | +| AWS.Athena.Query.Status.State | String | The state of the query execution. | +| AWS.Athena.Query.Status.StateChangeReason | String | Further detail about the status of the query. | +| AWS.Athena.Query.Status.SubmissionDateTime | String | The date and time that the query was submitted. | +| AWS.Athena.Query.Status.CompletionDateTime | String | The date and time that the query completed. | +| AWS.Athena.Query.Status.AthenaError.ErrorCategory | Number | An integer value that specifies the category of a query failure error. | +| AWS.Athena.Query.Status.AthenaError.ErrorType | Number | An integer value that provides specific information about an Athena query error. For the meaning of specific values, see the Error Type Reference in the Amazon Athena User Guide. | +| AWS.Athena.Query.Status.AthenaError.Retryable | Boolean | True if the query might succeed if resubmitted. | +| AWS.Athena.Query.Status.AthenaError.ErrorMessage | String | Contains a short description of the error that occurred. | +| AWS.Athena.Query.Statistics.EngineExecutionTimeInMillis | Number | The number of milliseconds that the query took to execute. | +| AWS.Athena.Query.Statistics.DataScannedInBytes | Number | The number of bytes in the data that was queried. 
| +| AWS.Athena.Query.Statistics.DataManifestLocation | String | The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. | +| AWS.Athena.Query.Statistics.TotalExecutionTimeInMillis | Number | The number of milliseconds that Athena took to run the query. | +| AWS.Athena.Query.Statistics.QueryQueueTimeInMillis | Number | The number of milliseconds that the query was in your query queue waiting for resources. | +| AWS.Athena.Query.Statistics.ServicePreProcessingTimeInMillis | Number | The number of milliseconds that Athena took to preprocess the query before submitting the query to the query engine. | +| AWS.Athena.Query.Statistics.QueryPlanningTimeInMillis | Number | The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. | +| AWS.Athena.Query.Statistics.ServiceProcessingTimeInMillis | Number | The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query. | +| AWS.Athena.Query.ResultReuseInformation.ReusedPreviousResult | Boolean | True if a previous query result was reused; false if the result was generated from a new run of the query. | +| AWS.Athena.Query.WorkGroup | String | The name of the workgroup in which the query ran. | +| AWS.Athena.Query.EngineVersion.SelectedEngineVersion | String | The engine version requested by the user. Possible values are determined by the output of ListEngineVersions, including AUTO. | +| AWS.Athena.Query.EngineVersion.EffectiveEngineVersion | String | The engine version on which the query runs. | +| AWS.Athena.Query.ExecutionParameters | List | A list of values for the parameters in a query. The values are applied sequentially to the parameters in the query in the order in which the parameters occur. The list of parameters is not returned in the response. | +| AWS.Athena.Query.SubstatementType | String | The type of query statement that was run. | +| AWS.Athena.MfaLoginQueryResults | List | List of query results. | + +#### Command Example +```!aws-security-lake-user-mfalogin-query table=Test database=test user_name=123 output_location=s3://path/to/query/bucket/``` + +### aws-security-lake-source-ip-query + +*** +Runs a query that takes a provided source IP address and queries the AWS Security Lake for console login attempts (Success/Failed) associated with the IP address, using AWS CloudTrail logs. + +#### Base Command + +`aws-security-lake-source-ip-query` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| database | The database to run the query against. | Required | +| table | The table to run the query against. | Required | +| ip_src | The source IP address to search for console login attempts. | Required | +| output_location | The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. | Required | +| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional | +| roleSessionName | An identifier for the assumed role session. | Optional | +| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional | +| region | The AWS region. If not specified, the default region will be used. | Optional | +| query_limit | A limit (number) to use for the query. 
If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. | Optional | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.Athena.Query.QueryExecutionId | String | The unique identifier for each query execution. | +| AWS.Athena.Query.Query | String | The SQL query statements which the query execution ran. | +| AWS.Athena.Query.StatementType | String | The type of query statement that was run. | +| AWS.Athena.Query.ResultConfiguration.OutputLocation | String | The location in Amazon S3 where your query and calculation results are stored, such as 's3://path/to/query/bucket/'. | +| AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.EncryptionOption | String | If query and calculation results are encrypted in Amazon S3, indicates the encryption option used \(for example, SSE_KMS or CSE_KMS\) and key information. | +| AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.KmsKey | String | For SSE_KMS and CSE_KMS, this is the KMS key ARN or ID. | +| AWS.Athena.Query.ResultConfiguration.ExpectedBucketOwner | String | The Amazon Web Services account ID that you expect to be the owner of the Amazon S3 bucket specified by ResultConfiguration.OutputLocation. | +| AWS.Athena.Query.ResultConfiguration.AclConfiguration.S3AclOption | String | The Amazon S3 canned ACL that Athena should specify when storing query results. | +| AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.Enabled | Boolean | True if previous query results can be reused when the query is run; otherwise, false. The default is false. | +| AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.MaxAgeInMinutes | Number | Specifies, in minutes, the maximum age of a previous query result that Athena should consider for reuse. The default is 60. | +| AWS.Athena.Query.QueryExecutionContext.Database | String | The name of the database used in the query execution. | +| AWS.Athena.Query.QueryExecutionContext.Catalog | String | The name of the data catalog used in the query execution. | +| AWS.Athena.Query.Status.State | String | The state of the query execution. | +| AWS.Athena.Query.Status.StateChangeReason | String | Further detail about the status of the query. | +| AWS.Athena.Query.Status.SubmissionDateTime | String | The date and time that the query was submitted. | +| AWS.Athena.Query.Status.CompletionDateTime | String | The date and time that the query completed. | +| AWS.Athena.Query.Status.AthenaError.ErrorCategory | Number | An integer value that specifies the category of a query failure error. | +| AWS.Athena.Query.Status.AthenaError.ErrorType | Number | An integer value that provides specific information about an Athena query error. For the meaning of specific values, see the Error Type Reference in the Amazon Athena User Guide. | +| AWS.Athena.Query.Status.AthenaError.Retryable | Boolean | True if the query might succeed if resubmitted. | +| AWS.Athena.Query.Status.AthenaError.ErrorMessage | String | Contains a short description of the error that occurred. | +| AWS.Athena.Query.Statistics.EngineExecutionTimeInMillis | Number | The number of milliseconds that the query took to execute. | +| AWS.Athena.Query.Statistics.DataScannedInBytes | Number | The number of bytes in the data that was queried. | +| AWS.Athena.Query.Statistics.DataManifestLocation | String | The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. 
| AWS.Athena.Query.Statistics.TotalExecutionTimeInMillis | Number | The number of milliseconds that Athena took to run the query. | +| AWS.Athena.Query.Statistics.QueryQueueTimeInMillis | Number | The number of milliseconds that the query was in your query queue waiting for resources. | +| AWS.Athena.Query.Statistics.ServicePreProcessingTimeInMillis | Number | The number of milliseconds that Athena took to preprocess the query before submitting the query to the query engine. | +| AWS.Athena.Query.Statistics.QueryPlanningTimeInMillis | Number | The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. | +| AWS.Athena.Query.Statistics.ServiceProcessingTimeInMillis | Number | The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query. | +| AWS.Athena.Query.ResultReuseInformation.ReusedPreviousResult | Boolean | True if a previous query result was reused; false if the result was generated from a new run of the query. | +| AWS.Athena.Query.WorkGroup | String | The name of the workgroup in which the query ran. | +| AWS.Athena.Query.EngineVersion.SelectedEngineVersion | String | The engine version requested by the user. Possible values are determined by the output of ListEngineVersions, including AUTO. | +| AWS.Athena.Query.EngineVersion.EffectiveEngineVersion | String | The engine version on which the query runs. | +| AWS.Athena.Query.ExecutionParameters | List | A list of values for the parameters in a query. The values are applied sequentially to the parameters in the query in the order in which the parameters occur. The list of parameters is not returned in the response. | +| AWS.Athena.Query.SubstatementType | String | The type of query statement that was run. | +| AWS.Athena.SourceIPQueryResults | List | List of query results. | + +#### Command Example +```!aws-security-lake-source-ip-query table=Test database=test ip_src=1.2.3.4 output_location=s3://path/to/query/bucket/``` + +### aws-security-lake-guardduty-activity-query + +*** +Search GuardDuty logs for activity of a specified severity level. + +#### Base Command + +`aws-security-lake-guardduty-activity-query` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| database | The database to run the query against. | Required | +| table | The table to run the query against. | Required | +| severity | The severity of related events to search for. Possible values are: Unknown, Informational, Low, Medium, High, Critical, Fatal, Other. | Required | +| output_location | The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. | Required | +| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional | +| roleSessionName | An identifier for the assumed role session. | Optional | +| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional | +| region | The AWS region. If not specified, the default region will be used. | Optional | +| query_limit | A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored.
| Optional | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.Athena.Query.QueryExecutionId | String | The unique identifier for each query execution. | +| AWS.Athena.Query.Query | String | The SQL query statements which the query execution ran. | +| AWS.Athena.Query.StatementType | String | The type of query statement that was run. | +| AWS.Athena.Query.ResultConfiguration.OutputLocation | String | The location in Amazon S3 where your query and calculation results are stored, such as 's3://path/to/query/bucket/'. | +| AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.EncryptionOption | String | If query and calculation results are encrypted in Amazon S3, indicates the encryption option used \(for example, SSE_KMS or CSE_KMS\) and key information. | +| AWS.Athena.Query.ResultConfiguration.EncryptionConfiguration.KmsKey | String | For SSE_KMS and CSE_KMS, this is the KMS key ARN or ID. | +| AWS.Athena.Query.ResultConfiguration.ExpectedBucketOwner | String | The Amazon Web Services account ID that you expect to be the owner of the Amazon S3 bucket specified by ResultConfiguration.OutputLocation. | +| AWS.Athena.Query.ResultConfiguration.AclConfiguration.S3AclOption | String | The Amazon S3 canned ACL that Athena should specify when storing query results. | +| AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.Enabled | Boolean | True if previous query results can be reused when the query is run; otherwise, false. The default is false. | +| AWS.Athena.Query.ResultReuseConfiguration.ResultReuseByAgeConfiguration.MaxAgeInMinutes | Number | Specifies, in minutes, the maximum age of a previous query result that Athena should consider for reuse. The default is 60. | +| AWS.Athena.Query.QueryExecutionContext.Database | String | The name of the database used in the query execution. | +| AWS.Athena.Query.QueryExecutionContext.Catalog | String | The name of the data catalog used in the query execution. | +| AWS.Athena.Query.Status.State | String | The state of the query execution. | +| AWS.Athena.Query.Status.StateChangeReason | String | Further detail about the status of the query. | +| AWS.Athena.Query.Status.SubmissionDateTime | String | The date and time that the query was submitted. | +| AWS.Athena.Query.Status.CompletionDateTime | String | The date and time that the query completed. | +| AWS.Athena.Query.Status.AthenaError.ErrorCategory | Number | An integer value that specifies the category of a query failure error. | +| AWS.Athena.Query.Status.AthenaError.ErrorType | Number | An integer value that provides specific information about an Athena query error. For the meaning of specific values, see the Error Type Reference in the Amazon Athena User Guide. | +| AWS.Athena.Query.Status.AthenaError.Retryable | Boolean | True if the query might succeed if resubmitted. | +| AWS.Athena.Query.Status.AthenaError.ErrorMessage | String | Contains a short description of the error that occurred. | +| AWS.Athena.Query.Statistics.EngineExecutionTimeInMillis | Number | The number of milliseconds that the query took to execute. | +| AWS.Athena.Query.Statistics.DataScannedInBytes | Number | The number of bytes in the data that was queried. | +| AWS.Athena.Query.Statistics.DataManifestLocation | String | The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. 
|
+| AWS.Athena.Query.Statistics.TotalExecutionTimeInMillis | Number | The number of milliseconds that Athena took to run the query. |
+| AWS.Athena.Query.Statistics.QueryQueueTimeInMillis | Number | The number of milliseconds that the query was in your query queue waiting for resources. |
+| AWS.Athena.Query.Statistics.ServicePreProcessingTimeInMillis | Number | The number of milliseconds that Athena took to preprocess the query before submitting the query to the query engine. |
+| AWS.Athena.Query.Statistics.QueryPlanningTimeInMillis | Number | The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. |
+| AWS.Athena.Query.Statistics.ServiceProcessingTimeInMillis | Number | The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query. |
+| AWS.Athena.Query.ResultReuseInformation.ReusedPreviousResult | Boolean | True if a previous query result was reused; false if the result was generated from a new run of the query. |
+| AWS.Athena.Query.WorkGroup | String | The name of the workgroup in which the query ran. |
+| AWS.Athena.Query.EngineVersion.SelectedEngineVersion | String | The engine version requested by the user. Possible values are determined by the output of ListEngineVersions, including AUTO. |
+| AWS.Athena.Query.EngineVersion.EffectiveEngineVersion | String | The engine version on which the query runs. |
+| AWS.Athena.Query.ExecutionParameters | List | A list of values for the parameters in a query. The values are applied sequentially to the parameters in the query in the order in which the parameters occur. The list of parameters is not returned in the response. |
+| AWS.Athena.Query.SubstatementType | String | The type of query statement that was run. |
+| AWS.Athena.GuardDutyActivityQueryResults | List | List of query results. |
+
+#### Command Example
+```!aws-security-lake-guardduty-activity-query table=Test database=test severity=0-Unknown output_location=s3://path/to/query/bucket/```
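+
+The `AWS.Athena.Query` context outputs above mirror Athena's `GetQueryExecution` response. As a rough illustration of the underlying flow, here is a minimal boto3 sketch (not the integration's actual code; the query string, region, and bucket are placeholder values):
+
+```python
+import time
+
+import boto3
+
+athena = boto3.client("athena", region_name="eu-central-1")  # example region
+
+# Start the query; OutputLocation corresponds to the 'output_location' argument.
+execution_id = athena.start_query_execution(
+    QueryString="SELECT * FROM test_db.test_table LIMIT 10",
+    ResultConfiguration={"OutputLocation": "s3://path/to/query/bucket/"},
+)["QueryExecutionId"]
+
+# Poll until the query leaves the QUEUED/RUNNING states.
+while True:
+    execution = athena.get_query_execution(QueryExecutionId=execution_id)["QueryExecution"]
+    if execution["Status"]["State"] not in ("QUEUED", "RUNNING"):
+        break
+    time.sleep(1)
+
+# On SUCCEEDED, rows are returned in the tabular 'ResultSet' format.
+if execution["Status"]["State"] == "SUCCEEDED":
+    results = athena.get_query_results(QueryExecutionId=execution_id)
+```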
+
+### aws-security-lake-data-sources-list
+
+***
+Retrieves a snapshot of the current region, including whether Amazon Security Lake is enabled for the specified accounts and which sources Security Lake is collecting data from.
+To run this command, the user must have 'securitylake' permissions.
+
+#### Base Command
+
+`aws-security-lake-data-sources-list`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| accounts | The Amazon Web Services account ID for which a static snapshot of the current Amazon Web Services Region, including enabled accounts and log sources, is retrieved. | Optional |
+| limit | Specifies the maximum number of results to return. | Optional |
+| next_token | Lists if there are more results available. The value of nextToken is a unique pagination token for each page. Repeat the call using the returned token to retrieve the next page. Keep all other arguments unchanged. | Optional |
+| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional |
+| roleSessionName | An identifier for the assumed role session. | Optional |
+| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional |
+| region | The AWS region. If not specified, the default region will be used. | Optional |
+| query_limit | A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| AWS.SecurityLake.DataLakeSource.DataLakeArn | String | The Amazon Resource Name \(ARN\) created by you to provide to the subscriber. |
+| AWS.SecurityLake.DataLakeSource.DataLakeSources.account | String | The ID of the Security Lake account for which logs are collected. |
+| AWS.SecurityLake.DataLakeSource.DataLakeSources.eventClasses | List | The Open Cybersecurity Schema Framework \(OCSF\) event classes which describe the type of data that the custom source will send to Security Lake. |
+| AWS.SecurityLake.DataLakeSource.DataLakeSources.sourceName | String | The supported Amazon Web Services from which logs and events are collected. Amazon Security Lake supports log and event collection for natively supported Amazon Web Services. |
+| AWS.SecurityLake.DataLakeSource.DataLakeSources.sourceStatuses.resource | String | Defines the path in which the stored logs, which contain information on your systems, applications, and services, are available. |
+| AWS.SecurityLake.DataLakeSource.DataLakeSources.sourceStatuses.status | String | The health status of services, including error codes and patterns. |
+| AWS.SecurityLake.DataLakeSourceNextToken | String | Lists if there are more results available. The value of nextToken is a unique pagination token for each page. Repeat the call using the returned token to retrieve the next page. Keep all other arguments unchanged. |
+
+#### Command Example
+```!aws-security-lake-data-sources-list```
+
+### aws-security-lake-data-lakes-list
+
+***
+Retrieves the Amazon Security Lake configuration object for the specified Amazon Web Services Regions.
+To run this command, the user must have 'securitylake' permissions.
+
+#### Base Command
+
+`aws-security-lake-data-lakes-list`
+
+#### Input
+
+| **Argument Name** | **Description** | **Required** |
+| --- | --- | --- |
+| regions | The list of regions where Security Lake is enabled. | Optional |
+| roleArn | The Amazon Resource Name (ARN) of the role to assume. | Optional |
+| roleSessionName | An identifier for the assumed role session. | Optional |
+| roleSessionDuration | The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) up to the maximum session duration setting for the role. | Optional |
+| region | The AWS region. If not specified, the default region will be used. | Optional |
+| query_limit | A limit (number) to use for the query. If the keyword 'LIMIT' exists within 'QueryString', this parameter will be ignored. | Optional |
+
+#### Context Output
+
+| **Path** | **Type** | **Description** |
+| --- | --- | --- |
+| AWS.SecurityLake.createStatus | String | Retrieves the status of the configuration operation for an account in Amazon Security Lake. |
+| AWS.SecurityLake.dataLakeArn | String | The Amazon Resource Name \(ARN\) created by you to provide to the subscriber. |
+| AWS.SecurityLake.encryptionConfiguration.kmsKeyId | String | The ID of the KMS encryption key used by Amazon Security Lake to encrypt the Security Lake object. |
+| AWS.SecurityLake.lifecycleConfiguration.expiration.days | Number | Number of days before data expires in the Amazon Security Lake object.
| +| AWS.SecurityLake.lifecycleConfiguration.transitions.days | Number | Number of days before data transitions to a different S3 Storage Class in the Amazon Security Lake object. | +| AWS.SecurityLake.lifecycleConfiguration.transitions.storageClass | String | The range of storage classes that you can choose from based on the data access, resiliency, and cost requirements of your workloads. | +| AWS.SecurityLake.region | String | The Amazon Web Services regions where Security Lake is enabled. | +| AWS.SecurityLake.replicationConfiguration.regions | String | Replication enables automatic, asynchronous copying of objects across Amazon S3 buckets. | +| AWS.SecurityLake.replicationConfiguration.roleArn | String | Replication settings for the Amazon S3 buckets. This parameter uses the Identity and Access Management \(IAM\) role you created that is managed by Security Lake, to ensure the replication setting is correct. | +| AWS.SecurityLake.s3BucketArn | String | The ARN for the Amazon Security Lake Amazon S3 bucket. | +| AWS.SecurityLake.updateStatus.exception.code | String | The reason code for the exception of the last UpdateDataLake or DeleteDataLake API request. | +| AWS.SecurityLake.updateStatus.exception.reason | String | The reason for the exception of the last UpdateDataLake or DeleteDataLake API request. | +| AWS.SecurityLake.updateStatus.requestId | String | The unique ID for the last UpdateDataLake or DeleteDataLake API request. | +| AWS.SecurityLake.updateStatus.status | String | The status of the last UpdateDataLake or DeleteDataLake API request that was requested. | + +#### Command Example +```!aws-security-lake-data-lakes-list``` diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/command_examples b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/command_examples new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_execution_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_execution_command.json new file mode 100644 index 000000000000..043fa85cab66 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_execution_command.json @@ -0,0 +1,36 @@ +{ + "QueryExecutionId": "b3c194e7-6580-421c-81fa-4b409e1ba04f", + "Query": "SELECT * FROM test_db.test_table", + "StatementType": "DML", + "ResultConfiguration": { + "OutputLocation": "s3://athena-queries-test/b3c194e7-6580-421c-81fa-4b409e1ba04f.csv" + }, + "ResultReuseConfiguration": { + "ResultReuseByAgeConfiguration": { + "Enabled": false + } + }, + "QueryExecutionContext": {}, + "Status": { + "State": "SUCCEEDED", + "SubmissionDateTime": "2023-11-07T10:01:03", + "CompletionDateTime": "2023-11-07T10:01:04" + }, + "Statistics": { + "EngineExecutionTimeInMillis": 1074, + "DataScannedInBytes": 86996, + "TotalExecutionTimeInMillis": 1296, + "QueryQueueTimeInMillis": 192, + "QueryPlanningTimeInMillis": 493, + "ServiceProcessingTimeInMillis": 30, + "ResultReuseInformation": { + "ReusedPreviousResult": false + } + }, + "WorkGroup": "primary", + "EngineVersion": { + "SelectedEngineVersion": "AUTO", + "EffectiveEngineVersion": "Athena engine version 3" + }, + "SubstatementType": "SELECT" +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_results_command.json 
b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_results_command.json new file mode 100644 index 000000000000..fd468e25eb31 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_results_command.json @@ -0,0 +1,272 @@ +[ + { + "metadata": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=7c33bcd3-0252-4b28-b2c7-7f38ed881796, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699342808000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5AV2YZSR7D9DFDW8}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=s3.amazonaws.com}", + "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231107", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + }, + { + "metadata": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=69d54eee-2c1c-4f51-b89f-45c7029695c6, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699342833000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BRW2SJ5SBFD7T91W}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=cloudtrail.amazonaws.com}", + "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", + "resources": 
"[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2347}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231107", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + }, + { + "metadata": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=2a0b2b1a-bff6-4a89-93e5-801e3fdf3b92, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699342772000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=55TVMR6HYD2ABTWB}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=s3.amazonaws.com}", + "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, 
additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231107", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + }, + { + "metadata": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=fee096f2-611d-4b3c-b092-ea06e8a527ca, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699137065000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5E1S778MRDJEDVSR}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=cloudtrail.amazonaws.com}", + "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1317}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231104", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + }, + { + "metadata": 
"{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=68481d66-4a3e-42fe-a878-a1e6a7176c16, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699137170000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=VDKRDR9XNA8H2MF8}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=cloudtrail.amazonaws.com}", + "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1199}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231104", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + }, + { + "metadata": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=6a5583e7-0463-439a-89c6-5431d3cd58fe, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699137247000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=PN8HM14HVKG8ED12}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=cloudtrail.amazonaws.com}", + "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", + "resources": 
"[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1556}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231104", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + }, + { + "metadata": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=e3cc1c86-950f-4201-a362-3f8f68631a83, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699342905000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=J3HPX1A1NNHGKFKH}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=cloudtrail.amazonaws.com}", + "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, 
additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1580}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231107", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + }, + { + "metadata": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=85ca9b31-7607-40bd-8f65-0e6f892550a5, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699342908000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=0J1R23EBRVY6T7ZJ}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=cloudtrail.amazonaws.com}", + "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, 
additionalEventData.bytesTransferredIn=2276}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231107", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + }, + { + "metadata": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=a7a07526-e890-4697-85a5-3d9cf4ab1e9b, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699342917000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BW3N96A9B3H9MVBS}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=s3.amazonaws.com}", + "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231107", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + }, + { + "metadata": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=8f60fe7d-1b50-42f5-956c-cd0f60fc98ed, profiles=[cloud], version=1.0.0-rc.2}", + "time": "1699342948000", + "cloud": "{region=eu-central-1, provider=AWS}", + "api": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=8A48J06NSTRZZH41}}", + "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", + "http_request": "{user_agent=s3.amazonaws.com}", + "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, 
type=AWS::S3::Bucket}]", + "class_name": "API Activity", + "class_uid": "3005", + "category_name": "Audit Activity", + "category_uid": "3", + "severity_id": "1", + "severity": "Informational", + "activity_name": "Update", + "activity_id": "3", + "type_uid": "300503", + "type_name": "API Activity: Update", + "status": "Success", + "status_id": "1", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=960}", + "region": "eu-central-1", + "accountid": "654338056632", + "eventday": "20231107", + "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" + } +] \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_catalogs_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_catalogs_command.json new file mode 100644 index 000000000000..385c2df6ce1f --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_catalogs_command.json @@ -0,0 +1,9 @@ +{ + "AWS.SecurityLake.Catalog(val.CatalogName && val.CatalogName == obj.CatalogName)": [ + { + "CatalogName": "test", + "Type": "LAMBDA" + } + ], + "AWS.SecurityLake(true)" :{"CatalogNextToken": "test"} +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_data_lakes_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_data_lakes_command.json new file mode 100644 index 000000000000..bc0c2fecb7e2 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_data_lakes_command.json @@ -0,0 +1,34 @@ +[{ + "createStatus": "INITIALIZED", + "dataLakeArn": "test", + "encryptionConfiguration": { + "kmsKeyId": "test" + }, + "lifecycleConfiguration": { + "expiration": { + "days": 123 + }, + "transitions": [ + { + "days": 123, + "storageClass": "test" + } + ] + }, + "region": "test", + "replicationConfiguration": { + "regions": [ + "test" + ], + "roleArn": "test" + }, + "s3BucketArn": "test", + "updateStatus": { + "exception": { + "code": "test", + "reason": "test" + }, + "requestId": "test", + "status": "INITIALIZED" + } +}] \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_database_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_database_command.json new file mode 100644 index 000000000000..05617af1913b --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_database_command.json 
@@ -0,0 +1,12 @@ +{ + "AWS.SecurityLake.Database(val.Name && val.Name == obj.Name)": [ + { + "Name": "test", + "Description": "test", + "Parameters": { + "test_param": "test" + } + } + ], + "AWS.SecurityLake(true)": {"DatabaseNextToken": "test"} +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_sources_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_sources_command.json new file mode 100644 index 000000000000..7cc6e8545956 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_sources_command.json @@ -0,0 +1,21 @@ +{ + "AWS.SecurityLake.DataLakeSource.DataLakeArn": "test", + "AWS.SecurityLake.DataLakeSource.DataLakeSources":[ + { + "account": "test", + "eventClasses": [ + "test" + ], + "sourceName": "test", + "sourceStatuses": [ + { + "resource": "test", + "status": "COLLECTING" + } + ] + } + ] +, + "AWS.SecurityLake(true)": {"DataLakeSourceNextToken": "test"} + +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_table_metadata_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_table_metadata_command.json new file mode 100644 index 000000000000..6e993cc1c6b2 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/list_table_metadata_command.json @@ -0,0 +1,26 @@ +{ + "AWS.SecurityLake.TableMetadata(val.Name && val.Name == obj.Name)": [ + { + "Name": "test", + "TableType": "test", + "Columns": [ + { + "Name": "test", + "Type": "test", + "Comment": "test" + } + ], + "PartitionKeys": [ + { + "Name": "test", + "Type": "test", + "Comment": "test" + } + ], + "Parameters": { + "string": "test" + } + } + ], + "AWS.SecurityLake(true)" :{"TableMetadataNextToken": "test"} +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/start_query_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/start_query_command.json new file mode 100644 index 000000000000..c367fe0eda4d --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/start_query_command.json @@ -0,0 +1,4 @@ +{ + "AWS.SecurityLake.Query": "SELECT * FROM test_db.test_table", + "AWS.SecurityLake.QueryExecutionId": "b3c194e7-6580-421c-81fa-4b409e1ba04f" +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_hr/get_query_results_command.txt b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_hr/get_query_results_command.txt new file mode 100644 index 000000000000..dadfbb63bbcd --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_hr/get_query_results_command.txt @@ -0,0 +1,13 @@ +### AWS Athena Query Results +|accountid|activity_id|activity_name|actor|api|category_name|category_uid|class_name|class_uid|cloud|eventday|http_request|metadata|query_execution_id|region|resources|severity|severity_id|src_endpoint|status|status_id|time|type_name|type_uid|unmapped| +|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---| +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, 
invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5AV2YZSR7D9DFDW8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=7c33bcd3-0252-4b28-b2c7-7f38ed881796, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342808000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480} | +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BRW2SJ5SBFD7T91W}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=69d54eee-2c1c-4f51-b89f-45c7029695c6, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342833000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, 
requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2347} | +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=55TVMR6HYD2ABTWB}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=2a0b2b1a-bff6-4a89-93e5-801e3fdf3b92, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342772000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480} | +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5E1S778MRDJEDVSR}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=fee096f2-611d-4b3c-b092-ea06e8a527ca, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, 
{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137065000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1317} | +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=VDKRDR9XNA8H2MF8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=68481d66-4a3e-42fe-a878-a1e6a7176c16, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137170000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, 
additionalEventData.bytesTransferredIn=1199} | +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=PN8HM14HVKG8ED12}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=6a5583e7-0463-439a-89c6-5431d3cd58fe, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137247000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1556} | +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=J3HPX1A1NNHGKFKH}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=e3cc1c86-950f-4201-a362-3f8f68631a83, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342905000 | API Activity: Update | 300503 | 
{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1580} | +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=0J1R23EBRVY6T7ZJ}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=85ca9b31-7607-40bd-8f65-0e6f892550a5, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342908000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2276} | +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | 
{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BW3N96A9B3H9MVBS}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=a7a07526-e890-4697-85a5-3d9cf4ab1e9b, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342917000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480} | +| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=8A48J06NSTRZZH41}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=8f60fe7d-1b50-42f5-956c-cd0f60fc98ed, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342948000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, 
responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=960} | diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_execution.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_execution.json new file mode 100644 index 000000000000..dc84fda8a074 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_execution.json @@ -0,0 +1,50 @@ +{ + "QueryExecution": { + "QueryExecutionId": "b3c194e7-6580-421c-81fa-4b409e1ba04f", + "Query": "SELECT * FROM test_db.test_table", + "StatementType": "DML", + "ResultConfiguration": { + "OutputLocation": "s3://athena-queries-test/b3c194e7-6580-421c-81fa-4b409e1ba04f.csv" + }, + "ResultReuseConfiguration": { + "ResultReuseByAgeConfiguration": { + "Enabled": false + } + }, + "QueryExecutionContext": {}, + "Status": { + "State": "SUCCEEDED", + "SubmissionDateTime": "2023-11-07T10:01:03", + "CompletionDateTime": "2023-11-07T10:01:04" + }, + "Statistics": { + "EngineExecutionTimeInMillis": 1074, + "DataScannedInBytes": 86996, + "TotalExecutionTimeInMillis": 1296, + "QueryQueueTimeInMillis": 192, + "QueryPlanningTimeInMillis": 493, + "ServiceProcessingTimeInMillis": 30, + "ResultReuseInformation": { + "ReusedPreviousResult": false + } + }, + "WorkGroup": "primary", + "EngineVersion": { + "SelectedEngineVersion": "AUTO", + "EffectiveEngineVersion": "Athena engine version 3" + }, + "SubstatementType": "SELECT" + }, + "ResponseMetadata": { + "RequestId": "07c697eb-30b3-42a6-bb42-86afd1227d3f", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Tue, 07 Nov 2023 16:51:15 GMT", + "content-type": "application/x-amz-json-1.1", + "content-length": "2106", + "connection": "keep-alive", + "x-amzn-requestid": "07c697eb-30b3-42a6-bb42-86afd1227d3f" + }, + "RetryAttempts": 0 + } +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_results.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_results.json new file mode 100644 index 000000000000..25b9d0eb82d4 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_results.json @@ -0,0 +1,1222 @@ +{ + "UpdateCount": 0, + "ResultSet": { + "Rows": [ + { + "Data": [ + { + "VarCharValue": "metadata" + }, + { + "VarCharValue": "time" + }, + { + "VarCharValue": "cloud" + }, + { + "VarCharValue": "api" + }, + { + "VarCharValue": "dst_endpoint" + }, + { + "VarCharValue": "actor" + }, + { + "VarCharValue": "http_request" + }, + { + "VarCharValue": "src_endpoint" + }, + { + "VarCharValue": "resources" + }, + { + "VarCharValue": "class_name" + }, + { + "VarCharValue": "class_uid" + }, + { + "VarCharValue": "category_name" + }, + { + "VarCharValue": "category_uid" + }, + { + "VarCharValue": "severity_id" + }, + { + "VarCharValue": "severity" + }, + { + "VarCharValue": "user" + }, + { + "VarCharValue": "activity_name" + }, + { + "VarCharValue": "activity_id" + }, + { + "VarCharValue": "type_uid" + }, + { + "VarCharValue": "type_name" + }, + { + "VarCharValue": "status" + }, + { + "VarCharValue": "status_id" + }, + { + "VarCharValue": "mfa" + }, + { + "VarCharValue": "unmapped" + }, + { + "VarCharValue": "region" + }, + { + "VarCharValue": 
"accountid" + }, + { + "VarCharValue": "eventday" + } + ] + }, + { + "Data": [ + { + "VarCharValue": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=7c33bcd3-0252-4b28-b2c7-7f38ed881796, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699342808000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5AV2YZSR7D9DFDW8}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=s3.amazonaws.com}" + }, + { + "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + }, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231107" + } + ] + }, + { + "Data": [ + { + "VarCharValue": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=69d54eee-2c1c-4f51-b89f-45c7029695c6, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699342833000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BRW2SJ5SBFD7T91W}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "{uid=null, ip=null, 
domain=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + }, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2347}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231107" + } + ] + }, + { + "Data": [ + { + "VarCharValue": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=2a0b2b1a-bff6-4a89-93e5-801e3fdf3b92, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699342772000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=55TVMR6HYD2ABTWB}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=s3.amazonaws.com}" + }, + { + "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + 
}, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231107" + } + ] + }, + { + "Data": [ + { + "VarCharValue": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=fee096f2-611d-4b3c-b092-ea06e8a527ca, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699137065000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5E1S778MRDJEDVSR}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + }, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, 
sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1317}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231104" + } + ] + }, + { + "Data": [ + { + "VarCharValue": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=68481d66-4a3e-42fe-a878-a1e6a7176c16, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699137170000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=VDKRDR9XNA8H2MF8}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + }, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1199}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231104" + } + ] + }, + { + "Data": [ + { + "VarCharValue": 
"{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=6a5583e7-0463-439a-89c6-5431d3cd58fe, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699137247000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=PN8HM14HVKG8ED12}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + }, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1556}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231104" + } + ] + }, + { + "Data": [ + { + "VarCharValue": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=e3cc1c86-950f-4201-a362-3f8f68631a83, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699342905000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=J3HPX1A1NNHGKFKH}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, 
invoked_by=cloudtrail.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + }, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1580}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231107" + } + ] + }, + { + "Data": [ + { + "VarCharValue": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=85ca9b31-7607-40bd-8f65-0e6f892550a5, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699342908000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=0J1R23EBRVY6T7ZJ}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API 
Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + }, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2276}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231107" + } + ] + }, + { + "Data": [ + { + "VarCharValue": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=a7a07526-e890-4697-85a5-3d9cf4ab1e9b, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699342917000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BW3N96A9B3H9MVBS}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=s3.amazonaws.com}" + }, + { + "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + }, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, 
additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231107" + } + ] + }, + { + "Data": [ + { + "VarCharValue": "{product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=8f60fe7d-1b50-42f5-956c-cd0f60fc98ed, profiles=[cloud], version=1.0.0-rc.2}" + }, + { + "VarCharValue": "1699342948000" + }, + { + "VarCharValue": "{region=eu-central-1, provider=AWS}" + }, + { + "VarCharValue": "{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=8A48J06NSTRZZH41}}" + }, + {}, + { + "VarCharValue": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}" + }, + { + "VarCharValue": "{user_agent=s3.amazonaws.com}" + }, + { + "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" + }, + { + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + }, + { + "VarCharValue": "API Activity" + }, + { + "VarCharValue": "3005" + }, + { + "VarCharValue": "Audit Activity" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "1" + }, + { + "VarCharValue": "Informational" + }, + {}, + { + "VarCharValue": "Update" + }, + { + "VarCharValue": "3" + }, + { + "VarCharValue": "300503" + }, + { + "VarCharValue": "API Activity: Update" + }, + { + "VarCharValue": "Success" + }, + { + "VarCharValue": "1" + }, + {}, + { + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=960}" + }, + { + "VarCharValue": "eu-central-1" + }, + { + "VarCharValue": "654338056632" + }, + { + "VarCharValue": "20231107" + } + ] + } + ], + "ResultSetMetadata": { + "ColumnInfo": [ + { + "CatalogName": "hive", + 
"SchemaName": "", + "TableName": "", + "Name": "metadata", + "Label": "metadata", + "Type": "row", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "time", + "Label": "time", + "Type": "bigint", + "Precision": 19, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "cloud", + "Label": "cloud", + "Type": "row", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "api", + "Label": "api", + "Type": "row", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "dst_endpoint", + "Label": "dst_endpoint", + "Type": "row", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "actor", + "Label": "actor", + "Type": "row", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "http_request", + "Label": "http_request", + "Type": "row", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "src_endpoint", + "Label": "src_endpoint", + "Type": "row", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "resources", + "Label": "resources", + "Type": "array", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "class_name", + "Label": "class_name", + "Type": "varchar", + "Precision": 2147483647, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": true + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "class_uid", + "Label": "class_uid", + "Type": "integer", + "Precision": 10, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "category_name", + "Label": "category_name", + "Type": "varchar", + "Precision": 2147483647, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": true + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "category_uid", + "Label": "category_uid", + "Type": "integer", + "Precision": 10, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "severity_id", + "Label": "severity_id", + "Type": "integer", + "Precision": 10, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "severity", + "Label": "severity", + "Type": "varchar", + "Precision": 2147483647, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": true + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "user", + "Label": "user", + "Type": "row", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + 
"SchemaName": "", + "TableName": "", + "Name": "activity_name", + "Label": "activity_name", + "Type": "varchar", + "Precision": 2147483647, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": true + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "activity_id", + "Label": "activity_id", + "Type": "integer", + "Precision": 10, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "type_uid", + "Label": "type_uid", + "Type": "integer", + "Precision": 10, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "type_name", + "Label": "type_name", + "Type": "varchar", + "Precision": 2147483647, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": true + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "status", + "Label": "status", + "Type": "varchar", + "Precision": 2147483647, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": true + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "status_id", + "Label": "status_id", + "Type": "integer", + "Precision": 10, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "mfa", + "Label": "mfa", + "Type": "boolean", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "unmapped", + "Label": "unmapped", + "Type": "map", + "Precision": 0, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": false + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "region", + "Label": "region", + "Type": "varchar", + "Precision": 2147483647, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": true + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "accountid", + "Label": "accountid", + "Type": "varchar", + "Precision": 2147483647, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": true + }, + { + "CatalogName": "hive", + "SchemaName": "", + "TableName": "", + "Name": "eventday", + "Label": "eventday", + "Type": "varchar", + "Precision": 2147483647, + "Scale": 0, + "Nullable": "UNKNOWN", + "CaseSensitive": true + } + ] + } + }, + "ResponseMetadata": { + "RequestId": "4633521e-628b-491b-bb41-e4915567588c", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Wed, 08 Nov 2023 08:29:33 GMT", + "content-type": "application/x-amz-json-1.1", + "content-length": "72582", + "connection": "keep-alive", + "x-amzn-requestid": "4633521e-628b-491b-bb41-e4915567588c" + }, + "RetryAttempts": 0 + } +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_catalogs_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_catalogs_command.json new file mode 100644 index 000000000000..23a5b4f0b581 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_catalogs_command.json @@ -0,0 +1,8 @@ +{ + "DataCatalogsSummary": [ + { + "CatalogName": "test", + "Type": "LAMBDA" + } + ], + "NextToken": "test"} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_data_lakes_command.json 
b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_data_lakes_command.json new file mode 100644 index 000000000000..da3f0feda014 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_data_lakes_command.json @@ -0,0 +1,38 @@ +{ + "dataLakes": [ + { + "createStatus": "INITIALIZED", + "dataLakeArn": "test", + "encryptionConfiguration": { + "kmsKeyId": "test" + }, + "lifecycleConfiguration": { + "expiration": { + "days": 123 + }, + "transitions": [ + { + "days": 123, + "storageClass": "test" + } + ] + }, + "region": "test", + "replicationConfiguration": { + "regions": [ + "test" + ], + "roleArn": "test" + }, + "s3BucketArn": "test", + "updateStatus": { + "exception": { + "code": "test", + "reason": "test" + }, + "requestId": "test", + "status": "INITIALIZED" + } + } + ] +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_database_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_database_command.json new file mode 100644 index 000000000000..bd7a80753903 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_database_command.json @@ -0,0 +1,12 @@ +{ + "DatabaseList": [ + { + "Name": "test", + "Description": "test", + "Parameters": { + "test_param": "test" + } + } + ], + "NextToken": "test" +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_sources_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_sources_command.json new file mode 100644 index 000000000000..a65c603743d1 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_sources_command.json @@ -0,0 +1,19 @@ +{ + "dataLakeArn": "test", + "dataLakeSources": [ + { + "account": "test", + "eventClasses": [ + "test" + ], + "sourceName": "test", + "sourceStatuses": [ + { + "resource": "test", + "status": "COLLECTING" + } + ] + } + ], + "nextToken": "test" +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_table_metadata_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_table_metadata_command.json new file mode 100644 index 000000000000..621d067ec4c8 --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/list_table_metadata_command.json @@ -0,0 +1,26 @@ +{ + "TableMetadataList": [ + { + "Name": "test", + "TableType": "test", + "Columns": [ + { + "Name": "test", + "Type": "test", + "Comment": "test" + } + ], + "PartitionKeys": [ + { + "Name": "test", + "Type": "test", + "Comment": "test" + } + ], + "Parameters": { + "string": "test" + } + } + ], + "NextToken": "test" +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/start_query_execution.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/start_query_execution.json new file mode 100644 index 000000000000..aa7cbf5a4d3b --- /dev/null +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/start_query_execution.json @@ -0,0 +1,15 @@ +{ + "QueryExecutionId": "b3c194e7-6580-421c-81fa-4b409e1ba04f", + "ResponseMetadata": { + "RequestId": "90cd1f51-4cf6-4992-a435-11bf8bd791df", + "HTTPStatusCode": 200, + "HTTPHeaders": { 
+ "date": "Wed, 08 Nov 2023 09:07:39 GMT", + "content-type": "application/x-amz-json-1.1", + "content-length": "59", + "connection": "keep-alive", + "x-amzn-requestid": "90cd1f51-4cf6-4992-a435-11bf8bd791df" + }, + "RetryAttempts": 0 + } +} \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/README.md b/Packs/AWS-SecurityLake/README.md new file mode 100644 index 000000000000..a63416b0dc79 --- /dev/null +++ b/Packs/AWS-SecurityLake/README.md @@ -0,0 +1,18 @@ +### AWS Security Lake + +Amazon Security Lake is a fully managed security data lake service. You can use Security Lake to automatically centralize security data from AWS environments, SaaS providers, on premises, cloud sources, and third-party sources into a purpose-built data lake that's stored in your AWS account. +Security Lake helps you analyze security data, so you can get a more complete understanding of your security posture across the entire organization. With Security Lake, you can also improve the protection of your workloads, applications, and data. + +The data lake is backed by Amazon Simple Storage Service (Amazon S3) buckets, and you retain ownership over your data. + +## What does this pack do +### AWS SecurityLake +This integration enables you to: + +- List data lakes. +- List data catalogs. +- List databases. +- List table metadata. +- List data sources. +- Query AWS Athena. +- Run pre-defined queries on AWS Security Lake data. diff --git a/Packs/AWS-SecurityLake/TestPlaybooks/AWS_-_SecurityLake_Test_Playbbok.yml b/Packs/AWS-SecurityLake/TestPlaybooks/AWS_-_SecurityLake_Test_Playbbok.yml new file mode 100644 index 000000000000..d9b8febe8a2a --- /dev/null +++ b/Packs/AWS-SecurityLake/TestPlaybooks/AWS_-_SecurityLake_Test_Playbbok.yml @@ -0,0 +1,299 @@ +id: AWS_-_SecurityLake_Test_Playbook +version: -1 +name: AWS_-_SecurityLake_Test_Playbook +starttaskid: "0" +tasks: + "0": + id: "0" + taskid: ee59c709-f930-48e2-8976-d461cde3c3e9 + type: start + task: + id: ee59c709-f930-48e2-8976-d461cde3c3e9 + version: -1 + name: "" + iscommand: false + brand: "" + description: '' + nexttasks: + '#none#': + - "1" + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 50, + "y": 50 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "1": + id: "1" + taskid: 105d8eea-b36b-4b54-810e-2991dba795fb + type: regular + task: + id: 105d8eea-b36b-4b54-810e-2991dba795fb + version: -1 + name: List Catalogs + description: Lists the data catalogs in the current Amazon Web Services account. + script: '|||aws-security-lake-data-catalogs-list' + type: regular + iscommand: true + brand: "" + nexttasks: + '#none#': + - "6" + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 50, + "y": 195 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "2": + id: "2" + taskid: 889f293e-23cd-41c4-8219-28021fa569df + type: regular + task: + id: 889f293e-23cd-41c4-8219-28021fa569df + version: -1 + name: List Databases + description: Lists the databases in the specified data catalog. 
+ script: '|||aws-security-lake-databases-list' + type: regular + iscommand: true + brand: "" + nexttasks: + '#none#': + - "9" + scriptarguments: + catalog_name: + simple: ${AWS.SecurityLake.Catalog.CatalogName} + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 50, + "y": 545 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "3": + id: "3" + taskid: d6c1317e-6083-4a1e-869f-2f5df2813220 + type: regular + task: + id: d6c1317e-6083-4a1e-869f-2f5df2813220 + version: -1 + name: List table metadata + description: Lists the metadata for the tables in the specified data catalog database. + script: '|||aws-security-lake-table-metadata-list' + type: regular + iscommand: true + brand: "" + nexttasks: + '#none#': + - "4" + scriptarguments: + catalog_name: + simple: ${AWS.SecurityLake.Catalog.CatalogName} + database_name: + simple: ${AWS.SecurityLake.Database.Name} + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 50, + "y": 895 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "4": + id: "4" + taskid: 59f35a9e-8445-4e06-8576-818148e1b0a2 + type: regular + task: + id: 59f35a9e-8445-4e06-8576-818148e1b0a2 + version: -1 + name: List sources + description: Retrieves a snapshot of the current region, including whether Amazon Security Lake is enabled for those accounts and which sources Security Lake is collecting data from. + script: '|||aws-security-lake-data-sources-list' + type: regular + iscommand: true + brand: "" + nexttasks: + '#none#': + - "5" + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 50, + "y": 1070 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "5": + id: "5" + taskid: 93606b8e-6e73-4a60-885e-2ec46f092773 + type: regular + task: + id: 93606b8e-6e73-4a60-885e-2ec46f092773 + version: -1 + name: List data Lakes + description: Retrieves the Amazon Security Lake configuration object for the specified Amazon Web Services Regions. 
+ script: '|||aws-security-lake-data-lakes-list' + type: regular + iscommand: true + brand: "" + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 50, + "y": 1245 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "6": + id: "6" + taskid: 3b3284b2-84e6-4212-8ac7-9709eea9d918 + type: condition + task: + id: 3b3284b2-84e6-4212-8ac7-9709eea9d918 + version: -1 + name: Validate catalog + type: condition + iscommand: false + brand: "" + nexttasks: + "yes": + - "2" + separatecontext: false + conditions: + - label: "yes" + condition: + - - operator: isEqualString + left: + value: + simple: AWS.SecurityLake.Catalog.CatalogName + iscontext: true + right: + value: + simple: AwsDataCatalog + continueonerrortype: "" + view: |- + { + "position": { + "x": 50, + "y": 370 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "9": + id: "9" + taskid: 807591e1-1bf6-48de-8f27-a38a52f3fa7d + type: condition + task: + id: 807591e1-1bf6-48de-8f27-a38a52f3fa7d + version: -1 + name: Validate databases + type: condition + iscommand: false + brand: "" + nexttasks: + "yes": + - "3" + separatecontext: false + conditions: + - label: "yes" + condition: + - - operator: hasLength + left: + value: + simple: AWS.SecurityLake.Database + iscontext: true + right: + value: + simple: "3" + continueonerrortype: "" + view: |- + { + "position": { + "x": 50, + "y": 720 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false +view: |- + { + "linkLabelsPosition": {}, + "paper": { + "dimensions": { + "height": 1290, + "width": 380, + "x": 50, + "y": 50 + } + } + } +inputs: [] +outputs: [] +quiet: true +fromversion: 6.10.0 +description: '' diff --git a/Packs/AWS-SecurityLake/pack_metadata.json b/Packs/AWS-SecurityLake/pack_metadata.json new file mode 100644 index 000000000000..3aa1ec8031c1 --- /dev/null +++ b/Packs/AWS-SecurityLake/pack_metadata.json @@ -0,0 +1,19 @@ +{ + "name": "AWS - Security Lake", + "description": "Amazon Security Lake is a fully managed security data lake service.", + "support": "xsoar", + "currentVersion": "1.0.0", + "author": "Cortex XSOAR", + "url": "https://www.paloaltonetworks.com/cortex", + "email": "", + "categories": [ + "IT Services" + ], + "tags": [], + "useCases": [], + "keywords": [], + "marketplaces": [ + "xsoar", + "marketplacev2" + ] +} \ No newline at end of file diff --git a/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2.yml b/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2.yml index 44e6077ad3cf..29de2e176c36 100644 --- a/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2.yml +++ b/Packs/AzureSecurityCenter/Integrations/AzureSecurityCenter_v2/AzureSecurityCenter_v2.yml @@ -466,7 +466,7 @@ script: type: Unknown - contextPath: Azure.ResourceGroupName.properties.provisioningState description: Resource group provisioning state. 
- dockerimage: demisto/crypto:1.0.0.83343 + dockerimage: demisto/crypto:1.0.0.86361 runonce: false script: '-' type: python diff --git a/Packs/AzureSecurityCenter/ReleaseNotes/2_0_21.md b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_21.md new file mode 100644 index 000000000000..ab196de41f0d --- /dev/null +++ b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_21.md @@ -0,0 +1,3 @@ +#### Integrations +##### Microsoft Defender for Cloud +- Updated the Docker image to: *demisto/crypto:1.0.0.86361*. diff --git a/Packs/AzureSecurityCenter/pack_metadata.json b/Packs/AzureSecurityCenter/pack_metadata.json index 4493dffd77b8..e273913052d0 100644 --- a/Packs/AzureSecurityCenter/pack_metadata.json +++ b/Packs/AzureSecurityCenter/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Microsoft Defender for Cloud", "description": "Unified security management and advanced threat protection across hybrid cloud workloads.", "support": "xsoar", - "currentVersion": "2.0.20", + "currentVersion": "2.0.21", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict.yml b/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict.yml index bc843e925f62..baa0bb3df044 100644 --- a/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict.yml +++ b/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict.yml @@ -142,42 +142,6 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false - "14": - id: "14" - taskid: 23d5414c-6989-467d-8f25-135e5cc83841 - type: regular - task: - id: 23d5414c-6989-467d-8f25-135e5cc83841 - version: -1 - name: Get WildFire report - description: Retrieves results for a file hash using WildFire. - script: '|||wildfire-report' - type: regular - iscommand: true - brand: "" - nexttasks: - '#none#': - - "42" - scriptarguments: - sha256: - complex: - root: inputs.FileSHA256 - separatecontext: false - continueonerror: true - view: |- - { - "position": { - "x": 2310, - "y": 1130 - } - } - note: false - timertriggers: [] - ignoreworker: false - skipunavailable: true - quietmode: 0 - isoversize: false - isautoswitchedtoquietmode: false "15": id: "15" taskid: 6825c22c-48d1-4f99-803f-1b6169565f9c @@ -232,8 +196,8 @@ tasks: task: id: 5a797f7b-e0f2-46de-85c1-0d2165ce20a9 version: -1 - name: Was the file found as Suspicious? - description: "Was the file found as suspicious?" + name: Was the file found as Benign? + description: Was the file found as benign? type: condition iscommand: false brand: "" @@ -241,7 +205,7 @@ tasks: '#default#': - "29" "yes": - - "14" + - "54" separatecontext: false conditions: - label: "yes" @@ -696,7 +660,7 @@ tasks: task: id: cbb20744-6ea2-4151-8575-3bbac0b2962e version: -1 - name: Set file verdict + name: Set file verdict suspicious description: Set the SuspectedVerdict key in context to Suspicious File.
scriptName: Set type: regular @@ -709,22 +673,13 @@ tasks: key: simple: FileVerdict value: - complex: - root: WildFire.Verdicts - accessor: VerdictDescription - transformers: - - operator: SetIfEmpty - args: - applyIfEmpty: {} - defaultValue: - value: - simple: Suspicious + simple: Suspicious separatecontext: false view: |- { "position": { - "x": 2310, - "y": 1450 + "x": 2320, + "y": 1190 } } note: false @@ -961,41 +916,6 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false - "42": - id: "42" - taskid: 765d45fd-edfa-4084-8e93-ee9b3687c228 - type: regular - task: - id: 765d45fd-edfa-4084-8e93-ee9b3687c228 - version: -1 - name: Get WildFire verdict - description: Returns a verdict for a hash. - script: '|||wildfire-get-verdict' - type: regular - iscommand: true - brand: "" - nexttasks: - '#none#': - - "29" - scriptarguments: - hash: - complex: - root: inputs.FileSHA256 - separatecontext: false - view: |- - { - "position": { - "x": 2310, - "y": 1290 - } - } - note: false - timertriggers: [] - ignoreworker: false - skipunavailable: true - quietmode: 0 - isoversize: false - isautoswitchedtoquietmode: false "43": id: "43" taskid: 30038490-f60a-4a8a-8edd-06bd7af8e182 @@ -1347,6 +1267,43 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + "54": + id: "54" + taskid: 22852879-29ee-4b24-8286-57c1d6f5f3ef + type: regular + task: + id: 22852879-29ee-4b24-8286-57c1d6f5f3ef + version: -1 + name: Set file verdict benign + description: Set the SuspectedVerdict key in context to Suspicious File. + scriptName: Set + type: regular + iscommand: false + brand: "" + nexttasks: + '#none#': + - "25" + scriptarguments: + key: + simple: FileVerdict + value: + simple: Benign + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 1880, + "y": 1190 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false system: true view: |- { diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict_README.md index a1030089a7ef..b11545db9c85 100644 --- a/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict_README.md +++ b/Packs/CommonPlaybooks/Playbooks/playbook-Enrichment_for_Verdict_README.md @@ -6,12 +6,12 @@ This playbook uses the following sub-playbooks, integrations, and scripts. ### Sub-playbooks +* URL Enrichment - Generic v2 * Domain Enrichment - Generic v2 * Get prevalence for IOCs -* IP Enrichment - Generic v2 -* Account Enrichment - Generic v2.1 -* URL Enrichment - Generic v2 * File Reputation +* Account Enrichment - Generic v2.1 +* IP Enrichment - Generic v2 ### Integrations @@ -19,13 +19,12 @@ This playbook does not use any integrations. ### Scripts -* SearchIncidentsV2 * Set +* SearchIncidentsV2 ### Commands -* wildfire-get-verdict -* wildfire-report +This playbook does not use any commands. 
## Playbook Inputs diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-File_Reputation.yml b/Packs/CommonPlaybooks/Playbooks/playbook-File_Reputation.yml index c04d48bf21ea..d7867bc79236 100644 --- a/Packs/CommonPlaybooks/Playbooks/playbook-File_Reputation.yml +++ b/Packs/CommonPlaybooks/Playbooks/playbook-File_Reputation.yml @@ -26,6 +26,7 @@ tasks: - "7" - "4" - "18" + - "25" separatecontext: false view: |- { @@ -129,7 +130,7 @@ tasks: note: false timertriggers: [] ignoreworker: false - skipunavailable: false + skipunavailable: true quietmode: 0 isoversize: false isautoswitchedtoquietmode: false @@ -346,7 +347,7 @@ tasks: task: id: afa05da2-350f-4d09-85f1-7d9ddb31477c version: -1 - name: Set file verdict - NSRL + name: Set file verdict - IsNSRL description: Set a value in context under the key you entered. scriptName: Set type: regular @@ -682,7 +683,7 @@ tasks: task: id: bb096e6a-72b8-43f3-81a3-14633d6a58d3 version: -1 - name: Set file verdict - NSRL + name: Set file verdict - IsNotNSRL description: Set a value in context under the key you entered. scriptName: Set type: regular @@ -754,7 +755,7 @@ tasks: task: id: 9bc117b9-97ee-4aa4-81b5-e2ca1e5c9549 version: -1 - name: Set file verdict - XDR-TrustedSigners + name: Set file verdict - XDR-UnTrustedSigners description: Set a value in context under the key you entered. scriptName: Set type: regular @@ -825,6 +826,112 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + "25": + id: "25" + taskid: 825810e7-b8fb-4347-894b-51dae87fcb7f + type: title + task: + id: 825810e7-b8fb-4347-894b-51dae87fcb7f + version: -1 + name: WildFire + type: title + iscommand: false + brand: "" + description: '' + nexttasks: + '#none#': + - "27" + - "28" + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": -2950, + "y": -440 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "27": + id: "27" + taskid: 7bcc68bb-ba26-4690-8535-395ec5fbb28f + type: regular + task: + id: 7bcc68bb-ba26-4690-8535-395ec5fbb28f + version: -1 + name: Get WildFire report + description: Retrieves results for a file hash using WildFire. + script: '|||wildfire-report' + type: regular + iscommand: true + brand: "" + nexttasks: + '#none#': + - "3" + scriptarguments: + sha256: + complex: + root: inputs.FileSHA256 + separatecontext: false + continueonerror: true + continueonerrortype: "" + view: |- + { + "position": { + "x": -2730, + "y": 90 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: true + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "28": + id: "28" + taskid: e91fa96b-14ce-40a2-8ba6-2607ee95ea47 + type: regular + task: + id: e91fa96b-14ce-40a2-8ba6-2607ee95ea47 + version: -1 + name: Get WildFire verdict + description: Returns a verdict for a hash. 
+ script: '|||wildfire-get-verdict' + type: regular + iscommand: true + brand: "" + nexttasks: + '#none#': + - "3" + scriptarguments: + hash: + complex: + root: inputs.FileSHA256 + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": -3170, + "y": 90 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: true + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false +system: true view: |- { "linkLabelsPosition": { @@ -840,8 +947,8 @@ view: |- "paper": { "dimensions": { "height": 1195, - "width": 3540, - "x": -2280, + "width": 4430, + "x": -3170, "y": -600 } } @@ -879,6 +986,12 @@ outputs: - contextPath: XDRFileSigners description: XDR file signers. type: unknown +- contextPath: WildFire.Report + description: WildFire report details. + type: unknown +- contextPath: WildFire.Verdicts + description: WildFire verdict. + type: unknown tests: - No tests. fromversion: 6.6.0 diff --git a/Packs/CommonPlaybooks/Playbooks/playbook-File_Reputation_README.md b/Packs/CommonPlaybooks/Playbooks/playbook-File_Reputation_README.md index 1220ee154eaf..9860967a1f56 100644 --- a/Packs/CommonPlaybooks/Playbooks/playbook-File_Reputation_README.md +++ b/Packs/CommonPlaybooks/Playbooks/playbook-File_Reputation_README.md @@ -10,23 +10,31 @@ Note: a user can provide a list of trusted signers of his own using the playbook ## Dependencies + This playbook uses the following sub-playbooks, integrations, and scripts. ### Sub-playbooks + This playbook does not use any sub-playbooks. ### Integrations + This playbook does not use any integrations. ### Scripts + +* Set * http * ParseJSON -* Set ### Commands + +* wildfire-report +* wildfire-get-verdict * file ## Playbook Inputs + --- | **Name** | **Description** | **Default Value** | **Required** | @@ -36,6 +44,7 @@ This playbook does not use any integrations. | FileSHA256 | The file SHA256. | | Optional | ## Playbook Outputs + --- | **Path** | **Description** | **Type** | @@ -44,7 +53,11 @@ This playbook does not use any integrations. | NSRLFileVerdict | NSRL file verdict. | unknown | | VTFileSigners | VirusTotal file signers. | unknown | | XDRFileSigners | XDR file signers. | unknown | +| WildFire.Report | WildFire report details. | unknown | +| WildFire.Verdicts | WildFire verdict. 
| unknown | ## Playbook Image + --- -![File Reputation](https://raw.githubusercontent.com/demisto/content/48a7f1a1a628a2755201c55c24bc68d94e0dd49c/Packs/CommonPlaybooks/doc_files/File_Reputation.png) \ No newline at end of file + +![File Reputation](../doc_files/File_Reputation.png) diff --git a/Packs/CommonPlaybooks/ReleaseNotes/2_6_7.md b/Packs/CommonPlaybooks/ReleaseNotes/2_6_7.md new file mode 100644 index 000000000000..1575ca4a9e05 --- /dev/null +++ b/Packs/CommonPlaybooks/ReleaseNotes/2_6_7.md @@ -0,0 +1,10 @@ + +#### Playbooks + +##### File Reputation + +- Added a flow to get the file reputation from WildFire + +##### Enrichment for Verdict + +- Removed the WildFire reputation flow and moved it to the File Reputation playbook \ No newline at end of file diff --git a/Packs/CommonPlaybooks/doc_files/Enrichment_for_Verdict.png b/Packs/CommonPlaybooks/doc_files/Enrichment_for_Verdict.png index 36c9fc3c02a5..0cf2996a5003 100644 Binary files a/Packs/CommonPlaybooks/doc_files/Enrichment_for_Verdict.png and b/Packs/CommonPlaybooks/doc_files/Enrichment_for_Verdict.png differ diff --git a/Packs/CommonPlaybooks/doc_files/File_Reputation.png b/Packs/CommonPlaybooks/doc_files/File_Reputation.png index 44eb106c875c..faac84f6d6b7 100644 Binary files a/Packs/CommonPlaybooks/doc_files/File_Reputation.png and b/Packs/CommonPlaybooks/doc_files/File_Reputation.png differ diff --git a/Packs/CommonPlaybooks/pack_metadata.json b/Packs/CommonPlaybooks/pack_metadata.json index 189721fe6b83..159a1e910c10 100644 --- a/Packs/CommonPlaybooks/pack_metadata.json +++ b/Packs/CommonPlaybooks/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Common Playbooks", "description": "Frequently used playbooks pack.", "support": "xsoar", - "currentVersion": "2.6.6", + "currentVersion": "2.6.7", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/ContentManagement/IncidentFields/incidentfield-ConfigurationFileSource.json b/Packs/ContentManagement/IncidentFields/incidentfield-ConfigurationFileSource.json index 7e764db9d25b..b37b438ea2fd 100644 --- a/Packs/ContentManagement/IncidentFields/incidentfield-ConfigurationFileSource.json +++ b/Packs/ContentManagement/IncidentFields/incidentfield-ConfigurationFileSource.json @@ -15,7 +15,8 @@ "Attachment", "GitHub", "Gitlab", - "AzureDevOps" + "AzureDevOps", + "GoogleCloudStorage" ], "useAsKpi": false, "locked": false, diff --git a/Packs/ContentManagement/Playbooks/playbook-Configuration_Setup.yml b/Packs/ContentManagement/Playbooks/playbook-Configuration_Setup.yml index 13fef4925e0e..e52c9c155542 100644 --- a/Packs/ContentManagement/Playbooks/playbook-Configuration_Setup.yml +++ b/Packs/ContentManagement/Playbooks/playbook-Configuration_Setup.yml @@ -71,7 +71,7 @@ tasks: { "position": { "x": 275, - "y": 545 + "y": 555 } } note: false @@ -628,6 +628,8 @@ tasks: - '35' AzureDevOps: - "37" + GOOGLE CLOUD STORAGE: + - "39" separatecontext: false conditions: - label: Attachment @@ -670,6 +672,16 @@ tasks: right: value: simple: AzureDevOps + - label: GOOGLE CLOUD STORAGE + condition: + - - operator: isEqualString + left: + value: + simple: incident.configurationfilesource + iscontext: true + right: + value: + simple: GoogleCloudStorage view: |- { "position": { @@ -1075,7 +1087,8 @@ tasks: ref: simple: ${incident.branchname} using: - simple: Gitlab Gold + complex: + root: inputs.GitlabInstanceName separatecontext: false view: |- { @@ -1215,6 +1228,43 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + "39": + id: "39" 
+        taskid: 06f49ef0-a41e-4dc4-8b97-1cc696df593f
+        type: regular
+        task:
+          id: 06f49ef0-a41e-4dc4-8b97-1cc696df593f
+          version: -1
+          name: Get Configuration File from Google Cloud Storage
+          description: Retrieves object data into a file.
+          script: Google Cloud Storage|||gcs-download-file
+          type: regular
+          iscommand: true
+          brand: Google Cloud Storage
+        nexttasks:
+          '#none#':
+          - "2"
+        scriptarguments:
+          bucket_name:
+            simple: ${incident.cicds3bucketname}
+          object_name:
+            simple: ${incident.configfilepath}
+        separatecontext: false
+        continueonerrortype: ""
+        view: |-
+          {
+            "position": {
+              "x": 190,
+              "y": 380
+            }
+          }
+        note: false
+        timertriggers: []
+        ignoreworker: false
+        skipunavailable: false
+        quietmode: 0
+        isoversize: false
+        isautoswitchedtoquietmode: false
 view: |-
   {
     "linkLabelsPosition": {},
@@ -1233,6 +1283,11 @@ inputs:
   required: false
   description: Core REST API instance name to use.
   playbookInputQuery:
+- key: GitlabInstanceName
+  value: {}
+  required: false
+  description: GitLab instance name to use.
+  playbookInputQuery:
 outputs: []
 tests:
 - No tests (auto formatted)
diff --git a/Packs/ContentManagement/Playbooks/playbook-Configuration_Setup_README.md b/Packs/ContentManagement/Playbooks/playbook-Configuration_Setup_README.md
index 59eb7d066827..861e80691058 100644
--- a/Packs/ContentManagement/Playbooks/playbook-Configuration_Setup_README.md
+++ b/Packs/ContentManagement/Playbooks/playbook-Configuration_Setup_README.md
@@ -27,6 +27,7 @@ Playbook for the Configuration Setup incident type.
 | **Name** | **Description** | **Default Value** | **Required** |
 | --- | --- | --- | --- |
 | InstanceName | Core REST API instance name to use. | | Optional |
+| GitlabInstanceName | GitLab instance name to use. | | Optional |

 ## Playbook Outputs
 ---
diff --git a/Packs/ContentManagement/ReleaseNotes/1_2_17.md b/Packs/ContentManagement/ReleaseNotes/1_2_17.md
new file mode 100644
index 000000000000..45c5bd57afa7
--- /dev/null
+++ b/Packs/ContentManagement/ReleaseNotes/1_2_17.md
@@ -0,0 +1,12 @@
+
+#### Incident Fields
+
+- **Configuration File Source**
+  Added the option to get the Configuration File from Google Cloud Storage.
+
+#### Playbooks
+
+##### Configuration Setup
+
+- Added the option to get the Configuration File from Google Cloud Storage.
+- Added the input 'GitlabInstanceName' to use a specific GitLab instance in the playbook.
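Task 39 above is a thin wrapper around the Google Cloud Storage integration's `gcs-download-file` command, with both arguments mapped from incident fields. A minimal automation-style sketch of the equivalent call follows; the command and argument names are taken from the task definition above, while the example field values are hypothetical:

```python
import demistomock as demisto  # provides the demisto API surface outside the platform
from CommonServerPython import *  # noqa: F401,F403 - isError/get_error helpers

# Sketch only: mirrors task 39 of Configuration Setup, which feeds
# incident.cicds3bucketname / incident.configfilepath into gcs-download-file.
custom_fields = demisto.incident().get("CustomFields", {}) or {}
res = demisto.executeCommand("gcs-download-file", {
    "bucket_name": custom_fields.get("cicds3bucketname"),  # e.g. "cicd-config-bucket" (hypothetical)
    "object_name": custom_fields.get("configfilepath"),    # e.g. "xsoar_config.json" (hypothetical)
})
if isError(res[0]):
    return_error(get_error(res))
```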
diff --git a/Packs/ContentManagement/pack_metadata.json b/Packs/ContentManagement/pack_metadata.json index 112e9e90d002..9a7199f3ca40 100644 --- a/Packs/ContentManagement/pack_metadata.json +++ b/Packs/ContentManagement/pack_metadata.json @@ -2,7 +2,7 @@ "name": "XSOAR CI/CD", "description": "This pack enables you to orchestrate your XSOAR system configuration.", "support": "xsoar", - "currentVersion": "1.2.16", + "currentVersion": "1.2.17", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation.yml b/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation.yml index ffb0b024d590..8da1d6d8bc37 100644 --- a/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation.yml +++ b/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation.yml @@ -99,14 +99,6 @@ tasks: value: simple: Suspicious ignorecase: true - - operator: isEqualString - left: - value: - simple: FileVerdict - iscontext: true - right: - value: - simple: Malicious - - operator: isEqualString left: value: @@ -225,9 +217,11 @@ tasks: brand: '' nexttasks: '#default#': - - '39' + - "39" Benign: - - "77" + - "85" + Greyware: + - "86" separatecontext: false conditions: - label: Benign @@ -241,31 +235,31 @@ tasks: iscontext: true right: value: - simple: '0' - - operator: isEqualString + simple: "0" + - label: Greyware + condition: + - - operator: isEqualString left: value: - complex: - root: inputs.GraywareAsMalware + simple: WildFire.Verdicts.Verdict iscontext: true right: value: - simple: 'False' + simple: "2" - - operator: isEqualString left: value: - complex: - root: inputs.ShouldRescanBenign + simple: inputs.GraywareAsMalware iscontext: true right: value: - simple: 'True' - ignorecase: true + simple: "False" + continueonerrortype: "" view: |- { "position": { "x": 900, - "y": 875 + "y": 825 } } note: false @@ -275,9 +269,8 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false - continueonerrortype: "" - '33': - id: '33' + "33": + id: "33" taskid: 6b6f4826-2aa2-456a-8587-93f122f1c99e type: title task: @@ -428,7 +421,7 @@ tasks: { "position": { "x": 900, - "y": 2910 + "y": 2800 } } note: false @@ -472,7 +465,7 @@ tasks: { "position": { "x": 900, - "y": 2670 + "y": 2560 } } note: false @@ -550,7 +543,7 @@ tasks: { "position": { "x": 900, - "y": 3410 + "y": 3300 } } note: false @@ -596,7 +589,7 @@ tasks: { "position": { "x": 900, - "y": 3750 + "y": 3640 } } note: false @@ -627,7 +620,7 @@ tasks: { "position": { "x": 1270, - "y": 3920 + "y": 3810 } } note: false @@ -658,7 +651,7 @@ tasks: { "position": { "x": 900, - "y": 3255 + "y": 3145 } } note: false @@ -685,8 +678,8 @@ tasks: view: |- { "position": { - "x": 430, - "y": 4290 + "x": 40, + "y": 4170 } } note: false @@ -889,8 +882,8 @@ tasks: view: |- { "position": { - "x": 1280, - "y": 1720 + "x": 1340, + "y": 1590 } } note: false @@ -926,8 +919,8 @@ tasks: view: |- { "position": { - "x": 1280, - "y": 1880 + "x": 1340, + "y": 1750 } } note: false @@ -1135,7 +1128,7 @@ tasks: { "position": { "x": 900, - "y": 2500 + "y": 2390 } } note: false @@ -1265,7 +1258,7 @@ tasks: { "position": { "x": 1300, - "y": 3050 + "y": 2940 } } note: false @@ -1398,7 +1391,7 @@ tasks: { "position": { "x": 1370, - "y": 3580 + "y": 3470 } } note: false @@ -1457,8 +1450,8 @@ tasks: view: |- { "position": { - "x": 430, - "y": 2500 + "x": 40, + "y": 2390 } } note: false @@ -1493,8 +1486,8 @@ tasks: view: |- { "position": { - "x": 1280, - "y": 2040 + "x": 1340, + 
"y": 1910 } } note: false @@ -1577,8 +1570,8 @@ tasks: view: |- { "position": { - "x": 1280, - "y": 2200 + "x": 1340, + "y": 2070 } } note: false @@ -1624,7 +1617,7 @@ tasks: { "position": { "x": 1760, - "y": 3050 + "y": 2940 } } note: false @@ -1742,7 +1735,7 @@ tasks: { "position": { "x": 1760, - "y": 2910 + "y": 2800 } } note: false @@ -1777,8 +1770,8 @@ tasks: view: |- { "position": { - "x": 1280, - "y": 1280 + "x": 1340, + "y": 1150 } } note: false @@ -1820,11 +1813,20 @@ tasks: right: value: simple: Success + - operator: isEqualString + left: + value: + simple: inputs.ShouldRescanBenign + iscontext: true + right: + value: + simple: "True" + ignorecase: true view: |- { "position": { - "x": 1280, - "y": 1480 + "x": 1340, + "y": 1350 } } note: false @@ -1851,7 +1853,6 @@ tasks: '#none#': - "80" separatecontext: false - continueonerrortype: "" view: |- { "position": { @@ -2086,6 +2087,69 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + "85": + id: "85" + taskid: d5e7fd85-67c3-44c5-8f35-f08f3727a564 + type: title + task: + id: d5e7fd85-67c3-44c5-8f35-f08f3727a564 + version: -1 + name: Benign + type: title + iscommand: false + brand: "" + description: '' + nexttasks: + '#none#': + - "77" + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 1560, + "y": 1010 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "86": + id: "86" + taskid: 4457cf9c-89af-450f-8fe6-c211a98c3eca + type: title + task: + id: 4457cf9c-89af-450f-8fe6-c211a98c3eca + version: -1 + name: Greyware + type: title + iscommand: false + brand: "" + description: '' + nexttasks: + '#none#': + - "77" + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 1110, + "y": 1010 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false +system: true view: |- { "linkLabelsPosition": { diff --git a/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation_README.md b/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation_README.md index e9a64657d285..345214b4bbe3 100644 --- a/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation_README.md +++ b/Packs/Core/Playbooks/playbook-Local_Analysis_alert_Investigation_README.md @@ -41,14 +41,14 @@ This playbook uses the following sub-playbooks, integrations, and scripts. ### Sub-playbooks -* Containment Plan -* Handle False Positive Alerts -* Ticket Management - Generic * Wildfire Detonate and Analyze File * Enrichment for Verdict +* Recovery Plan * Endpoint Investigation Plan +* Ticket Management - Generic +* Containment Plan * Eradication Plan -* Recovery Plan +* Handle False Positive Alerts ### Integrations @@ -62,11 +62,11 @@ This playbook uses the following sub-playbooks, integrations, and scripts. 
### Commands

 * core-retrieve-file-details
-* closeInvestigation
-* core-report-incorrect-wildfire
-* internal-wildfire-get-report
+* setParentIncidentFields
+* internal-wildfire-get-report
+* closeInvestigation
 * core-retrieve-files
+* core-report-incorrect-wildfire

 ## Playbook Inputs
diff --git a/Packs/Core/ReleaseNotes/3_0_17.md b/Packs/Core/ReleaseNotes/3_0_17.md
new file mode 100644
index 000000000000..2f89d60e4850
--- /dev/null
+++ b/Packs/Core/ReleaseNotes/3_0_17.md
@@ -0,0 +1,6 @@
+
+#### Playbooks
+
+##### Local Analysis alert Investigation
+
+- Fixed the WildFire verdict decision task number 14 to correctly handle Benign and Malware verdicts.
diff --git a/Packs/Core/doc_files/Local_Analysis_alert_Investigation.png b/Packs/Core/doc_files/Local_Analysis_alert_Investigation.png
index 9e574181d735..f7a6fdda057c 100644
Binary files a/Packs/Core/doc_files/Local_Analysis_alert_Investigation.png and b/Packs/Core/doc_files/Local_Analysis_alert_Investigation.png differ
diff --git a/Packs/Core/pack_metadata.json b/Packs/Core/pack_metadata.json
index b0ed5bbd6110..2204eb640190 100644
--- a/Packs/Core/pack_metadata.json
+++ b/Packs/Core/pack_metadata.json
@@ -2,7 +2,7 @@
   "name": "Core - Investigation and Response",
   "description": "Automates incident response",
   "support": "xsoar",
-  "currentVersion": "3.0.16",
+  "currentVersion": "3.0.17",
   "author": "Cortex XSOAR",
   "url": "https://www.paloaltonetworks.com/cortex",
   "email": "",
diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
index 5b3799fc82c5..ed70f3d33a39 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.py
@@ -5238,7 +5238,7 @@ def cs_falcon_spotlight_list_host_by_vulnerability_command(args: dict) -> Comman
                           outputs_prefix="CrowdStrike.VulnerabilityHost",
                           outputs_key_field="id")

-def get_cve_command(args: dict) -> list[CommandResults]:
+def get_cve_command(args: dict) -> list[dict[str, Any]]:
     """
        Get a list of vulnerabilities by spotlight
     : args: filter which include params or filter param.
@@ -5269,10 +5269,10 @@
                               'Base Score': cve.get('base_score')}
         human_readable = tableToMarkdown('CrowdStrike Falcon CVE', cve_human_readable,
                                          headers=['ID', 'Description', 'Published Date', 'Base Score'])
-        command_results_list.append(CommandResults(raw_response=cve,
-                                                   readable_output=human_readable,
-                                                   relationships=relationships_list,
-                                                   indicator=cve_indicator))
+        command_results = CommandResults(raw_response=cve, readable_output=human_readable, relationships=relationships_list,
+                                         indicator=cve_indicator).to_context()
+        if command_results not in command_results_list:
+            command_results_list.append(command_results)

     return command_results_list

diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
index 1742ececae97..f9d0ebcc0b6b 100644
--- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
+++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon.yml
@@ -4962,7 +4962,7 @@ script:
     - contextPath: CrowdStrike.IOARules.version_ids
       description: The IOA Rule's version ID.
type: String - dockerimage: demisto/py3-tools:1.0.0.86064 + dockerimage: demisto/py3-tools:1.0.0.86612 isfetch: true ismappable: true isremotesyncin: true diff --git a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py index c8f285169398..a7cde8a8792c 100644 --- a/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py +++ b/Packs/CrowdStrikeFalcon/Integrations/CrowdStrikeFalcon/CrowdStrikeFalcon_test.py @@ -6704,3 +6704,28 @@ def test_get_incident_behavior_command(mocker): assert result.outputs_prefix == 'CrowdStrike.IncidentBehavior' assert result.outputs_key_field == 'behavior_id' assert result.outputs == api_mock['resources'] + + +def test_get_cve_command(mocker): + """ + Given: + - Raw response with duplicates + When: + - Running cve command + Then: + - Validate that the response doesn't contain duplicates + """ + import CrowdStrikeFalcon + + raw1 = {"id": "CVE-2023-12345", "description": "A1", "published_date": "2023-12-10T10:15:00Z", "base_score": 10, + "vector": "A1B2C3D4", "cisa_info": {"due_date": "2023-12-24T00:00:00Z", "is_cisa_kev": True}, + "actors": ["ALPHA", "BETA", "GAMMA"]} + raw2 = {"id": "CVE-2023-12345", "description": "A1", "published_date": "2023-12-10T10:15:00Z", "base_score": 10, + "vector": "A1B2C3D4", "cisa_info": {"due_date": "2023-12-24T00:00:00Z", "is_cisa_kev": False}, + "actors": ["ALPHA", "BETA", "GAMMA"]} + http_response = {'resources': [{'cve': raw1}, {'cve': raw1}, {'cve': raw1}, {'cve': raw2}, {'cve': raw1}, {'cve': raw2}]} + + mocker.patch.object(CrowdStrikeFalcon, 'http_request', return_value=http_response) + + results = CrowdStrikeFalcon.get_cve_command(args={'cve': 'CVE-2023-12345'}) + assert len(results) == 2 diff --git a/Packs/CrowdStrikeFalcon/ReleaseNotes/1_12_16.md b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_12_16.md new file mode 100644 index 000000000000..ec93b53ce67b --- /dev/null +++ b/Packs/CrowdStrikeFalcon/ReleaseNotes/1_12_16.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### CrowdStrike Falcon + +- Improved implementation of the ***cve*** command to avoid returning duplicates. +- Updated the Docker image to: *demisto/py3-tools:1.0.0.86612*. diff --git a/Packs/CrowdStrikeFalcon/pack_metadata.json b/Packs/CrowdStrikeFalcon/pack_metadata.json index bf7d501be0c3..0ff93191a01a 100644 --- a/Packs/CrowdStrikeFalcon/pack_metadata.json +++ b/Packs/CrowdStrikeFalcon/pack_metadata.json @@ -2,7 +2,7 @@ "name": "CrowdStrike Falcon", "description": "The CrowdStrike Falcon OAuth 2 API (formerly the Falcon Firehose API), enables fetching and resolving detections, searching devices, getting behaviors by ID, containing hosts, and lifting host containment.", "support": "xsoar", - "currentVersion": "1.12.15", + "currentVersion": "1.12.16", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/EmailCommunication/ReleaseNotes/2_0_24.md b/Packs/EmailCommunication/ReleaseNotes/2_0_24.md new file mode 100644 index 000000000000..670805640221 --- /dev/null +++ b/Packs/EmailCommunication/ReleaseNotes/2_0_24.md @@ -0,0 +1,9 @@ + +#### Scripts + +##### SendEmailReply + +- Fixed an issue with rendering Markdown in the email body to HTML. +- Fixed an issue with the `body_type` argument missing from the documentation. +- Fixed an issue with the `reputation_calc_async` argument not being used. +- Updated the Docker image to: *demisto/bs4-py3:1.0.0.86348*. 
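Stepping back to the CrowdStrike Falcon change above: the dedup works because `CommandResults.to_context()` returns a plain dict, and dicts compare by value, whereas `CommandResults` instances compare by identity and would never match an earlier entry. A stripped-down, pure-Python sketch of the pattern (the `to_context` helper here is a stand-in for the real method, and the sample payloads are invented):

```python
# Sketch of the dedup pattern from get_cve_command: serialize each result to a
# plain dict, then append only dicts not already collected.
raw_cves = [
    {"id": "CVE-2023-12345", "base_score": 10},
    {"id": "CVE-2023-12345", "base_score": 10},  # exact duplicate -> dropped
    {"id": "CVE-2023-12345", "base_score": 9},   # differs -> kept
]

def to_context(cve: dict) -> dict:
    # Stand-in for CommandResults(...).to_context(); the real method also packs
    # readable output, relationships, and the CVE indicator.
    return {"Contents": cve, "HumanReadable": cve["id"]}

results: list[dict] = []
for cve in raw_cves:
    ctx = to_context(cve)
    if ctx not in results:  # value equality on dicts drops duplicates
        results.append(ctx)

assert len(results) == 2
```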
diff --git a/Packs/EmailCommunication/Scripts/SendEmailReply/README.md b/Packs/EmailCommunication/Scripts/SendEmailReply/README.md
index 3ed657826992..b7e8fc334e98 100644
--- a/Packs/EmailCommunication/Scripts/SendEmailReply/README.md
+++ b/Packs/EmailCommunication/Scripts/SendEmailReply/README.md
@@ -37,6 +37,8 @@ For more information, see the section about permissions here: [https://docs-cort
 | service_mail | The email address the emails are sent from. |
 | mail_sender_instance | Name of the mail sender instance name for transmitting emails |
 | new_thread | Specify whether to reply to an existing thread or start a new one. Default value of 'n/a' is for 'Email Communication' type incidents only |
+| body_type | The type of the email body. Can be either HTML or plain text. Default is HTML. |
+| reputation_calc_async | Whether to calculate the reputation asynchronously. Default is false. |

 ## Outputs
 ---
diff --git a/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply.py b/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply.py
index 587c53e080cc..47a59a1a6f49 100644
--- a/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply.py
+++ b/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply.py
@@ -5,8 +5,37 @@
 import random
 import re
 from datetime import datetime as dt
+from markdown import Extension, markdown
+from markdown.inlinepatterns import UnderscoreProcessor, EmStrongItem

 ERROR_TEMPLATE = 'ERROR: SendEmailReply - {function_name}: {reason}'
+UNDERLINE_RE = r'(\+)([^+]+)\1'  # +underline+ -> <u>underline</u>
+STRIKETHROUGH_RE = r'(~{2})(.+?)\1'  # ~~Strikethrough~~ -> <s>Strikethrough</s>
+
+
+class DemistoUnderlineProcessor(UnderscoreProcessor):
+    """Processor for handling Underline."""
+
+    PATTERNS = [
+        EmStrongItem(re.compile(UNDERLINE_RE, re.DOTALL | re.UNICODE), 'single', 'u')
+    ]
+
+
+class DemistoStrikethroughProcessor(UnderscoreProcessor):
+    """Processor for handling Strikethrough."""
+
+    PATTERNS = [
+        EmStrongItem(re.compile(STRIKETHROUGH_RE, re.DOTALL | re.UNICODE), 'single', 's')
+    ]
+
+
+class DemistoExtension(Extension):
+    """ Add Custom Demisto Markdown support."""
+
+    def extendMarkdown(self, md):
+        """ Modify inline patterns. """
+        md.inlinePatterns.register(DemistoUnderlineProcessor(r'\+'), 'underline', 50)
+        md.inlinePatterns.register(DemistoStrikethroughProcessor(r'~'), 'strikethrough', 50)


 def get_utc_now():
@@ -409,7 +438,7 @@ def get_reply_body(notes, incident_id, attachments, reputation_calc_async=False)
             note_user = note['Metadata']['user']
             note_userdata = demisto.executeCommand("getUserByUsername", {"username": note_user})
             user_fullname = dict_safe_get(note_userdata[0], ['Contents', 'name']) or "DBot"
-            reply_body += f"{user_fullname}: \n{note['Contents']}\n\n"
+            reply_body += f"{user_fullname}: \n\n{note['Contents']}\n\n"

     if isinstance(attachments, str):
         attachments = argToList(attachments)
@@ -433,12 +462,8 @@
     else:
         return_error("Please add a note")

-    try:
-        res = demisto.executeCommand("mdToHtml", {"contextKey": "replyhtmlbody", "text": reply_body})
-        reply_html_body = res[0]['EntryContext']['replyhtmlbody']
-        return reply_body, reply_html_body
-    except Exception:
-        return_error(get_error(res))
+    reply_html_body = format_body(reply_body)
+    return reply_body, reply_html_body


 def get_email_recipients(email_to, email_from, service_mail, mailbox):
@@ -616,13 +641,15 @@ def format_body(new_email_body):
         new_email_body (str): Email body text with or without Markdown formatting included
     Returns: (str) HTML email body
     """
-    # Replace newlines with <br> element to preserve line breaks
-    new_email_body = new_email_body.replace('\n', '<br>')
-
-    res = demisto.executeCommand("mdToHtml", {"text": new_email_body})
-    html_body = res[0]['Contents']
-
-    return html_body
+    return markdown(new_email_body,
+                    extensions=[
+                        'tables',
+                        'fenced_code',
+                        'legacy_em',
+                        'sane_lists',
+                        'nl2br',
+                        DemistoExtension(),
+                    ])


 def single_thread_reply(email_code, incident_id, email_cc, add_cc, notes, body_type, attachments, files, email_subject,
@@ -970,8 +997,7 @@ def main():
         email_selected_thread = custom_fields.get('emailselectedthread')
     subject_include_incident_id = argToBoolean(args.get('subject_include_incident_id', False))
     body_type = args.get('bodyType') or args.get('body_type') or 'html'
-
-    argToBoolean(args.get('reputation_calc_async', False))
+    reputation_calc_async = argToBoolean(args.get('reputation_calc_async', False))

     if new_email_attachments:
         new_attachment_names = ', '.join([attachment.get('name', '') for attachment in new_email_attachments])
@@ -982,7 +1008,7 @@ def main():
         # This case is run when replying to an email from the 'Email Communication' layout
         single_thread_reply(email_code, incident_id, email_cc, add_cc, notes, body_type, attachments, files,
                             email_subject, subject_include_incident_id, email_to_str, service_mail, email_latest_message,
-                            mail_sender_instance, reputation_calc_async=False)
+                            mail_sender_instance, reputation_calc_async)

     elif new_thread == 'true':
         # This case is run when using the 'Email Threads' layout to send a new first-contact email message
@@ -997,5 +1023,5 @@
         subject_include_incident_id)


-if __name__ in ('__main__', '__builtin__', 'builtins'):
+if __name__ in ('__main__', '__builtin__', 'builtins'):  # pragma: no cover
     main()
diff --git a/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply.yml b/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply.yml
index f77ec97c5820..9be53b73c51e 100644
--- a/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply.yml
+++ b/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply.yml
@@ -51,7 +51,7 @@ subtype: python3
 system: true
 type: python
 fromversion: 5.0.0
-dockerimage: demisto/python3:3.10.13.84405
+dockerimage: demisto/bs4-py3:1.0.0.86348
 tests:
 - No tests (auto formatted)
 contentitemexportablefields:
diff --git a/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply_test.py b/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply_test.py
index 47cf9e80c423..d5650d4a4fbf 100644
--- a/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply_test.py
+++ b/Packs/EmailCommunication/Scripts/SendEmailReply/SendEmailReply_test.py
@@ -273,17 +273,17 @@ def test_get_query_window(list_response, expected_result, mocker):
     (
         [{'Metadata': {'user': 'DBot'}, 'Contents': 'note1'}, {'Metadata': {'user': 'DBot'}, 'Contents': 'note2'}],
         [{'name': 'attachment1.png'}, {'name': 'attachment2.png'}],
-        "DBot: \nnote1\n\nDBot: \nnote2\n\nAttachments: ['attachment1.png', 'attachment2.png']\n\n"
+        "DBot: \n\nnote1\n\nDBot: \n\nnote2\n\nAttachments: ['attachment1.png', 'attachment2.png']\n\n"
     ),
     (
         [{'Metadata': {'user': 'DBot'}, 'Contents': 'note1'}, {'Metadata': {'user': 'DBot'}, 'Contents': 'note2'}],
         [],
-        "DBot: \nnote1\n\nDBot: \nnote2\n\n"
+        "DBot: \n\nnote1\n\nDBot: \n\nnote2\n\n"
     ),
     (
         [{'Metadata': {'user': 'DBot'}, 'Contents': 'note1'}, {'Metadata': {'user': 'DBot'}, 'Contents': 'note2'}],
         "[]",
-        "DBot: \nnote1\n\nDBot: \nnote2\n\n"
+        "DBot: \n\nnote1\n\nDBot: \n\nnote2\n\n"
     )
 ]
 )
@@ -884,7 +884,7 @@ def test_main(new_thread, mocker):
     if new_thread == 'n/a':
         single_thread_reply_args = single_thread_reply_mocker.call_args
         expected_args = ('87654321', '10', '', 'test_cc@example.com', '', 'html', [], {}, None, False, 'end_user@company.com',
-                         'soc_sender@company.com', '123456', 'mail-sender-instance-1')
+                         'soc_sender@company.com', '123456', 'mail-sender-instance-1', False)
         assert single_thread_reply_args.args == expected_args
     elif new_thread == 'true':
         multi_thread_new_args = multi_thread_new_mocker.call_args
@@ -897,3 +897,77 @@
     expected_args = ('This is a test email.', 'html', '10', 1, {}, {}, 'test_cc@example.com', 'test_bcc@example.com',
                      'soc_sender@company.com', 'mail-sender-instance-1', 'None', False)
     assert multi_thread_reply_args.args == expected_args
+
+
+# Parametrized test for happy path scenarios with various realistic markdown inputs
+@pytest.mark.parametrize("input_md, expected_html, test_id", [
+    # Test ID: #1 - Simple text conversion
+    ("Hello, World!", "<p>Hello, World!</p>", "simple_text"),
+
+    # Test ID: #2 - Header conversion
+    ("# Header 1", "<h1>Header 1</h1>", "header_conversion"),
+
+    # Test ID: #3 - Table conversion
+    ("| Header1 | Header2 |\n| ------- | ------- |\n| Cell1 | Cell2 |",
+     "<table>\n<thead>\n<tr>\n<th>Header1</th>\n<th>Header2</th>\n</tr>\n</thead>\n<tbody>\n<tr>\n"
+     "<td>Cell1</td>\n<td>Cell2</td>\n</tr>\n</tbody>\n</table>",
+     "table_conversion"),
+
+    # Test ID: #4 - Emphasis conversion using legacy syntax
+    ("_italic_ **bold**",
+     "<p><em>italic</em> <strong>bold</strong></p>",
+     "emphasis_conversion"),
+
+    # Test ID: #5 - List conversion
+    ("- Item 1\n- Item 2",
+     "<ul>\n<li>Item 1</li>\n<li>Item 2</li>\n</ul>",
+     "list_conversion"),
+
+    # Test ID: #6 - New lines to <br> conversion
+    ("Line 1\nLine 2",
+     "<p>Line 1<br />\nLine 2</p>",
+     "newline_to_br_conversion"),
+], ids=lambda test_id: test_id)
+def test_format_body_happy_path(input_md, expected_html, test_id):
+    # Act
+    from SendEmailReply import format_body
+    result = format_body(input_md)
+
+    # Assert
+    assert result == expected_html, f"Test failed for {test_id}"
+
+
+# Parametrized test for edge cases
+@pytest.mark.parametrize("input_md, expected_html, test_id", [
+    # Test ID: #1 - Empty string
+    ("", "", "empty_string"),
+
+    # Test ID: #2 - Markdown with only special characters
+    ("# $%^&*()",
+     "<h1>$%^&amp;*()</h1>",
+     "special_characters_only"),
+], ids=lambda test_id: test_id)
+def test_format_body_edge_cases(input_md, expected_html, test_id):
+    # Act
+    from SendEmailReply import format_body
+    result = format_body(input_md)
+
+    # Assert
+    assert result == expected_html, f"Test failed for {test_id}"
+
+
+# Parametrized test for edge cases
+@pytest.mark.parametrize("input_md, expected_html, test_id", [
+    # Test ID: #1 - Demisto custom markdown underline syntax.
+    ("+underline+", "<p><u>underline</u></p>", "underline"),
+
+    # Test ID: #2 - Demisto custom markdown strikethrough syntax.
+    ("~~strikethrough~~", "<p><s>strikethrough</s></p>", "strikethrough"),
+], ids=lambda test_id: test_id)
+def test_demisto_custom_markdown_syntax(input_md, expected_html, test_id):
+    # Act
+    from SendEmailReply import format_body
+    result = format_body(input_md)
+
+    # Assert
+    assert result == expected_html, f"Test failed for {test_id}"
diff --git a/Packs/EmailCommunication/pack_metadata.json b/Packs/EmailCommunication/pack_metadata.json
index 7a3d6a41e1c7..426d51ca0886 100644
--- a/Packs/EmailCommunication/pack_metadata.json
+++ b/Packs/EmailCommunication/pack_metadata.json
@@ -2,7 +2,7 @@
   "name": "Email Communication",
   "description": "Do you have to send multiple emails to end users? This content pack helps you streamline the process and automate updates, notifications and more.\n",
   "support": "xsoar",
-  "currentVersion": "2.0.23",
+  "currentVersion": "2.0.24",
   "author": "Cortex XSOAR",
   "url": "https://www.paloaltonetworks.com/cortex",
   "videos": [
diff --git a/Packs/ExtraHop/Integrations/ExtraHop_v2/ExtraHop_v2.yml b/Packs/ExtraHop/Integrations/ExtraHop_v2/ExtraHop_v2.yml
index 7bd471760849..b42f1b506a8d 100644
--- a/Packs/ExtraHop/Integrations/ExtraHop_v2/ExtraHop_v2.yml
+++ b/Packs/ExtraHop/Integrations/ExtraHop_v2/ExtraHop_v2.yml
@@ -4536,7 +4536,7 @@ script:
     deprecated: true
     description: Use extrahop-packets-search instead. Search for specific packets in Reveal(x).
     name: extrahop-search-packets
-  dockerimage: demisto/python3:3.10.13.84405
+  dockerimage: demisto/python3:3.10.13.86272
   isfetch: true
   runonce: false
   script: '-'
diff --git a/Packs/ExtraHop/ReleaseNotes/2_2_4.md b/Packs/ExtraHop/ReleaseNotes/2_2_4.md
new file mode 100644
index 000000000000..00794c8c27b7
--- /dev/null
+++ b/Packs/ExtraHop/ReleaseNotes/2_2_4.md
@@ -0,0 +1,6 @@
+
+#### Integrations
+
+##### ExtraHop Reveal(x)
+
+- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
diff --git a/Packs/ExtraHop/pack_metadata.json b/Packs/ExtraHop/pack_metadata.json
index 58ef62677be0..e7f60cddc002 100644
--- a/Packs/ExtraHop/pack_metadata.json
+++ b/Packs/ExtraHop/pack_metadata.json
@@ -2,10 +2,10 @@
   "name": "ExtraHop Reveal(x)",
   "description": "Network detection and response. Complete visibility of network communications at enterprise scale, real-time threat detections backed by machine learning, and guided investigation workflows that simplify response.",
   "support": "partner",
-  "currentVersion": "2.2.3",
+  "currentVersion": "2.2.4",
   "author": "ExtraHop",
-  "url": "",
-  "email": "support@extrahop.com",
+  "url": "https://customer.extrahop.com/s/",
+  "email": "",
   "created": "2020-04-14T00:00:00Z",
   "categories": [
     "Network Security"
diff --git a/Packs/FeedAWS/Integrations/FeedAWS/FeedAWS.yml b/Packs/FeedAWS/Integrations/FeedAWS/FeedAWS.yml
index 1d749211ef99..059736c8569b 100644
--- a/Packs/FeedAWS/Integrations/FeedAWS/FeedAWS.yml
+++ b/Packs/FeedAWS/Integrations/FeedAWS/FeedAWS.yml
@@ -162,7 +162,7 @@ script:
       name: limit
     description: Fetches indicators from the feed.
     name: aws-get-indicators
-  dockerimage: demisto/py3-tools:1.0.0.84811
+  dockerimage: demisto/py3-tools:1.0.0.86691
   feed: true
   runonce: false
   script: '-'
diff --git a/Packs/FeedAWS/ReleaseNotes/1_1_48.md b/Packs/FeedAWS/ReleaseNotes/1_1_48.md
new file mode 100644
index 000000000000..ee060f7d2ba9
--- /dev/null
+++ b/Packs/FeedAWS/ReleaseNotes/1_1_48.md
@@ -0,0 +1,3 @@
+#### Integrations
+##### AWS Feed
+- Updated the Docker image to: *demisto/py3-tools:1.0.0.86691*.
diff --git a/Packs/FeedAWS/pack_metadata.json b/Packs/FeedAWS/pack_metadata.json index 760510b94043..a212d048b119 100644 --- a/Packs/FeedAWS/pack_metadata.json +++ b/Packs/FeedAWS/pack_metadata.json @@ -2,7 +2,7 @@ "name": "AWS Feed", "description": "Indicators feed from AWS", "support": "xsoar", - "currentVersion": "1.1.47", + "currentVersion": "1.1.48", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/FeedBambenekConsulting/.secrets-ignore b/Packs/FeedBambenekConsulting/.secrets-ignore index b319a38cd80b..bc752abfd3f8 100644 --- a/Packs/FeedBambenekConsulting/.secrets-ignore +++ b/Packs/FeedBambenekConsulting/.secrets-ignore @@ -25,3 +25,6 @@ e::Ba /feeds/dga/c2-masterlist.txt https://us-cert.cisa.gov https://osint.bambenekconsulting.com/feeds/ +sales@bambenekconsulting.com +23.82.12.29 +http://osint.bambenekconsulting.com diff --git a/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting.py b/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting.py index a315df206047..1599ff1588d2 100644 --- a/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting.py +++ b/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting.py @@ -1,153 +1,164 @@ from CommonServerPython import * name_to_url = { - 'C2 IP Feed': 'https://faf.bambenekconsulting.com/feeds/dga/c2-ipmasterlist.txt', - 'High-Confidence C2 IP Feed': 'http://osint.bambenekconsulting.com/feeds/c2-ipmasterlist-high.txt', - 'C2 Domain Feed': 'https://faf.bambenekconsulting.com/feeds/dga/c2-dommasterlist.txt', - 'High-Confidence C2 Domain Feed': 'http://osint.bambenekconsulting.com/feeds/c2-dommasterlist-high.txt', - 'DGA Domain Feed': 'https://faf.bambenekconsulting.com/feeds/dga-feed.gz', - 'High-Confidence DGA Domain Feed': 'https://faf.bambenekconsulting.com/feeds/dga-feed-high.gz', - 'C2 All Indicator Feed': 'https://faf.bambenekconsulting.com/feeds/dga/c2-masterlist.txt', - 'High-Confidence C2 All Indicator Feed': 'https://faf.bambenekconsulting.com/feeds/dga/c2-masterlist-high.txt', - 'Sinkhole Feed': 'https://faf.bambenekconsulting.com/feeds/sinkhole/latest.csv' + "C2 IP Feed": "https://faf.bambenekconsulting.com/feeds/dga/c2-ipmasterlist.txt", + "High-Confidence C2 IP Feed": "http://osint.bambenekconsulting.com/feeds/c2-ipmasterlist-high.txt", + "C2 Domain Feed": "https://faf.bambenekconsulting.com/feeds/dga/c2-dommasterlist.txt", + "High-Confidence C2 Domain Feed": "http://osint.bambenekconsulting.com/feeds/c2-dommasterlist-high.txt", + "DGA Domain Feed": "https://faf.bambenekconsulting.com/feeds/dga-feed.gz", + "High-Confidence DGA Domain Feed": "https://faf.bambenekconsulting.com/feeds/dga-feed-high.gz", + "C2 All Indicator Feed": "https://faf.bambenekconsulting.com/feeds/dga/c2-masterlist.txt", + "High-Confidence C2 All Indicator Feed": "https://faf.bambenekconsulting.com/feeds/dga/c2-masterlist-high.txt", + "Sinkhole Feed": "https://faf.bambenekconsulting.com/feeds/sinkhole/latest.csv", + "Malware Domains Feed": "https://faf.bambenekconsulting.com/feeds/maldomainml/malware-master.txt", + "Phishing Domains Feed": "https://faf.bambenekconsulting.com/feeds/maldomainml/phishing-master.txt", } def main(): feed_url_to_config = { - 'https://faf.bambenekconsulting.com/feeds/dga/c2-ipmasterlist.txt': { - 'fieldnames': ['value', 'description', - 'date_created', - 'info'], - 'indicator_type': FeedIndicatorType.IP, - 'relationship_name': 
EntityRelationship.Relationships.INDICATOR_OF, - 'relationship_entity_b_type': 'STIX Malware', - 'mapping': { - 'description': 'description', - 'malwarefamily': ('description', r'.*used\s+by\s(.*?)\s', None), - 'relationship_entity_b': ('description', r'.*used\s+by\s(.*?)\s', None), - } + "https://faf.bambenekconsulting.com/feeds/dga/c2-ipmasterlist.txt": { + "fieldnames": ["value", "description", "date_created", "info"], + "indicator_type": FeedIndicatorType.IP, + "relationship_name": EntityRelationship.Relationships.INDICATOR_OF, + "relationship_entity_b_type": "STIX Malware", + "mapping": { + "description": "description", + "malwarefamily": ("description", r".*used\s+by\s(.*?)\s", None), + "relationship_entity_b": ("description", r".*used\s+by\s(.*?)\s", None), + }, }, - - 'https://faf.bambenekconsulting.com/feeds/dga/c2-dommasterlist.txt': { - 'fieldnames': ['value', 'description', - 'date_created', - 'info'], - 'indicator_type': FeedIndicatorType.Domain, - 'relationship_name': EntityRelationship.Relationships.INDICATOR_OF, - 'relationship_entity_b_type': 'STIX Malware', - 'mapping': { - 'description': 'description', - 'malwarefamily': ('description', r'.*used\s+by\s(.*?)\s', None), - 'relationship_entity_b': ('description', r'.*used\s+by\s(.*?)$', None) - } + "https://faf.bambenekconsulting.com/feeds/dga/c2-dommasterlist.txt": { + "fieldnames": ["value", "description", "date_created", "info"], + "indicator_type": FeedIndicatorType.Domain, + "relationship_name": EntityRelationship.Relationships.INDICATOR_OF, + "relationship_entity_b_type": "STIX Malware", + "mapping": { + "description": "description", + "malwarefamily": ("description", r".*used\s+by\s(.*?)\s", None), + "relationship_entity_b": ("description", r".*used\s+by\s(.*?)$", None), + }, }, - 'http://osint.bambenekconsulting.com/feeds/c2-ipmasterlist-high.txt': { - 'fieldnames': ['value', 'description', - 'date_created', - 'info'], - 'indicator_type': FeedIndicatorType.IP, - 'relationship_name': EntityRelationship.Relationships.INDICATOR_OF, - 'relationship_entity_b_type': 'STIX Malware', - 'mapping': { - 'description': 'description', - 'malwarefamily': ('description', r'.*used\s+by\s(.*?)\s', None), - 'relationship_entity_b': ('description', r'.*used\s+by\s(.*?)\s', None) - } + "http://osint.bambenekconsulting.com/feeds/c2-ipmasterlist-high.txt": { + "fieldnames": ["value", "description", "date_created", "info"], + "indicator_type": FeedIndicatorType.IP, + "relationship_name": EntityRelationship.Relationships.INDICATOR_OF, + "relationship_entity_b_type": "STIX Malware", + "mapping": { + "description": "description", + "malwarefamily": ("description", r".*used\s+by\s(.*?)\s", None), + "relationship_entity_b": ("description", r".*used\s+by\s(.*?)\s", None), + }, }, - 'http://osint.bambenekconsulting.com/feeds/c2-dommasterlist-high.txt': { - 'fieldnames': ['value', 'description', - 'date_created', - 'info'], - 'indicator_type': FeedIndicatorType.Domain, - 'relationship_name': EntityRelationship.Relationships.INDICATOR_OF, - 'relationship_entity_b_type': 'STIX Malware', - 'mapping': { - 'description': 'description', - 'malwarefamily': ('description', r'.*used\s+by\s(.*?)\s', None), - 'relationship_entity_b': ('description', r'.*used\s+by\s(.*?)\s', None), - - } + "http://osint.bambenekconsulting.com/feeds/c2-dommasterlist-high.txt": { + "fieldnames": ["value", "description", "date_created", "info"], + "indicator_type": FeedIndicatorType.Domain, + "relationship_name": EntityRelationship.Relationships.INDICATOR_OF, + 
"relationship_entity_b_type": "STIX Malware", + "mapping": { + "description": "description", + "malwarefamily": ("description", r".*used\s+by\s(.*?)\s", None), + "relationship_entity_b": ("description", r".*used\s+by\s(.*?)\s", None), + }, }, - 'https://faf.bambenekconsulting.com/feeds/dga-feed.gz': { - 'fieldnames': ['value', 'description', - 'date_created', - 'info'], - 'indicator_type': FeedIndicatorType.Domain, - 'relationship_name': EntityRelationship.Relationships.INDICATOR_OF, - 'relationship_entity_b_type': 'STIX Malware', - 'mapping': { - 'description': 'description', - 'malwarefamily': ('description', r'.*used\s+by\s(.*?)(\(|DGA)', None), - 'relationship_entity_b': ('description', r'.*used\s+by\s(.*?)(\(|DGA)', None), + "https://faf.bambenekconsulting.com/feeds/dga-feed.gz": { + "fieldnames": ["value", "description", "date_created", "info"], + "indicator_type": FeedIndicatorType.Domain, + "relationship_name": EntityRelationship.Relationships.INDICATOR_OF, + "relationship_entity_b_type": "STIX Malware", + "mapping": { + "description": "description", + "malwarefamily": ("description", r".*used\s+by\s(.*?)(\(|DGA)", None), + "relationship_entity_b": ( + "description", + r".*used\s+by\s(.*?)(\(|DGA)", + None, + ), }, - 'is_zipped_file': True + "is_zipped_file": True, }, - 'https://faf.bambenekconsulting.com/feeds/dga-feed-high.gz': { - 'fieldnames': ['value', 'description', - 'date_created', - 'info'], - 'indicator_type': FeedIndicatorType.Domain, - 'relationship_name': EntityRelationship.Relationships.INDICATOR_OF, - 'relationship_entity_b_type': 'STIX Malware', - 'mapping': { - 'description': 'description', - 'malwarefamily': ('description', r'.*used\s+by\s(.*?)\s', None), - 'relationship_entity_b': ('description', r'.*used\s+by\s(.*?)\s', None) + "https://faf.bambenekconsulting.com/feeds/dga-feed-high.gz": { + "fieldnames": ["value", "description", "date_created", "info"], + "indicator_type": FeedIndicatorType.Domain, + "relationship_name": EntityRelationship.Relationships.INDICATOR_OF, + "relationship_entity_b_type": "STIX Malware", + "mapping": { + "description": "description", + "malwarefamily": ("description", r".*used\s+by\s(.*?)\s", None), + "relationship_entity_b": ("description", r".*used\s+by\s(.*?)\s", None), }, - 'is_zipped_file': True + "is_zipped_file": True, }, - 'https://faf.bambenekconsulting.com/feeds/dga/c2-masterlist.txt': { - 'fieldnames': ['value', - 'ip', - 'nsname', - 'nsip', - 'description', - 'info'], - 'indicator_type': FeedIndicatorType.Domain, - 'relationship_entity_b_type': FeedIndicatorType.IP, - 'relationship_name': EntityRelationship.Relationships.RESOLVED_FROM, - 'mapping': { - 'ipaddress': 'ip', - 'relationship_entity_b': 'ip' - } + "https://faf.bambenekconsulting.com/feeds/dga/c2-masterlist.txt": { + "fieldnames": ["value", "ip", "nsname", "nsip", "description", "info"], + "indicator_type": FeedIndicatorType.Domain, + "relationship_entity_b_type": FeedIndicatorType.IP, + "relationship_name": EntityRelationship.Relationships.RESOLVED_FROM, + "mapping": {"ipaddress": "ip", "relationship_entity_b": "ip"}, }, - 'https://faf.bambenekconsulting.com/feeds/dga/c2-masterlist-high.txt': { - 'fieldnames': ['value', - 'ip', - 'nsname', - 'nsip', - 'description', - 'info'], - 'indicator_type': FeedIndicatorType.Domain, - 'relationship_name': EntityRelationship.Relationships.RESOLVED_FROM, - 'relationship_entity_b_type': FeedIndicatorType.IP, - 'mapping': { - 'ipaddress': 'ip', - 'relationship_entity_b': 'ip' - } + 
"https://faf.bambenekconsulting.com/feeds/dga/c2-masterlist-high.txt": { + "fieldnames": ["value", "ip", "nsname", "nsip", "description", "info"], + "indicator_type": FeedIndicatorType.Domain, + "relationship_name": EntityRelationship.Relationships.RESOLVED_FROM, + "relationship_entity_b_type": FeedIndicatorType.IP, + "mapping": {"ipaddress": "ip", "relationship_entity_b": "ip"}, + }, + "https://faf.bambenekconsulting.com/feeds/sinkhole/latest.csv": { + "fieldnames": ["value", "owner"], + "indicator_type": FeedIndicatorType.IP, + "mapping": {"description": ("owner", None, "Sinkholed by {}")}, + }, + "https://faf.bambenekconsulting.com/feeds/maldomainml/malware-master.txt": { + "fieldnames": [ + "hostname", + "registered_domain", + "ipv4 address", + "asn", + "netblock", + "description", + ], + "indicator_type": FeedIndicatorType.Domain, + "relationship_name": EntityRelationship.Relationships.INDICATOR_OF, + "relationship_entity_b_type": "STIX Malware", + "mapping": { + "description": "description", + "ipaddress": "ipv4 address", + "malwarefamily": ("description", r".*used\s+by\s(.*?)\s", None), + "relationship_entity_b": ("description", r".*used\s+by\s(.*?)$", None), + }, + }, + "https://faf.bambenekconsulting.com/feeds/maldomainml/phishing-master.txt": { + "fieldnames": [ + "hostname", + "registered_domain", + "ipv4 address", + "asn", + "netblock", + "description", + ], + "indicator_type": FeedIndicatorType.Domain, + "relationship_name": EntityRelationship.Relationships.INDICATOR_OF, + "relationship_entity_b_type": "STIX Malware", + "mapping": { + "description": "description", + "ipaddress": "ipv4 address", + "malwarefamily": ("description", r".*used\s+by\s(.*?)\s", None), + "relationship_entity_b": ("description", r".*used\s+by\s(.*?)$", None), + }, }, - 'https://faf.bambenekconsulting.com/feeds/sinkhole/latest.csv': { - 'fieldnames': ['value', - 'owner'], - 'indicator_type': FeedIndicatorType.IP, - 'mapping': { - 'description': ('owner', None, 'Sinkholed by {}') - } - } } params = {k: v for k, v in demisto.params().items() if v is not None} - params['url'] = [name_to_url.get(url) for url in argToList(params.get('url'))] - params['feed_url_to_config'] = feed_url_to_config - params['ignore_regex'] = r'^#' - params['delimiter'] = ',' + params["url"] = [name_to_url.get(url) for url in argToList(params.get("url"))] + params["feed_url_to_config"] = feed_url_to_config + params["ignore_regex"] = r"^#" + params["delimiter"] = "," # Main execution of the CSV API Module. # This function allows to add to or override this execution. 
- feed_main('Bambenek Consulting Feed', params, 'bambenek') + feed_main("Bambenek Consulting Feed", params, "bambenek") from CSVFeedApiModule import * # noqa: E402 -if __name__ in ('__builtin__', 'builtins', '__main__'): +if __name__ in ("__builtin__", "builtins", "__main__"): main() diff --git a/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting.yml b/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting.yml index 0ac5ecf7a04f..959b393aa707 100644 --- a/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting.yml +++ b/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting.yml @@ -92,6 +92,8 @@ configuration: - DGA Domain Feed - High-Confidence DGA Domain Feed - Sinkhole Feed + - Malware Domains Feed + - Phishing Domains Feed required: true type: 16 - display: Username @@ -123,7 +125,7 @@ script: name: indicator_type description: Gets the feed indicators. name: bambenek-get-indicators - dockerimage: demisto/python3:3.10.12.63474 + dockerimage: demisto/python3:3.10.13.86272 feed: true runonce: false script: '-' diff --git a/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting_test.py b/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting_test.py new file mode 100644 index 000000000000..1501df5bf846 --- /dev/null +++ b/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/FeedBambenekConsulting_test.py @@ -0,0 +1,86 @@ +import demistomock as demisto +from unittest.mock import MagicMock + +import csv +from io import StringIO + +data = { + "value": "23.82.12.29", + "description": "IP used by beebone C&C", + "date_created": "2023-12-18 08:06", + "info": "http://osint.bambenekconsulting.com/manual/beebone.txt", +} + +# Convert the dictionary to a CSV string +csv_string = StringIO() +csv_writer = csv.DictWriter(csv_string, fieldnames=data.keys()) +csv_writer.writeheader() +csv_writer.writerow(data) +csv_data = csv_string.getvalue() +csv_string.close() + +# Convert the CSV string to a csv.DictReader object +csv_stringio = StringIO(csv_data) +csv_reader = csv.DictReader(csv_stringio) + + +def test_fetch_indicators_main(mocker): + """ + Given + - indicators response from bambenek consulting feed + + When + - Running main flow for fetching indicators command + + Then + - Ensure that all indicators values exist and are not 'None' + """ + from FeedBambenekConsulting import main + + mocker.patch.object( + demisto, + "params", + return_value={ + "feed": True, + "feedBypassExclusionList": False, + "feedExpirationInterval": "20160", + "feedExpirationPolicy": "suddenDeath", + "feedFetchInterval": 1, + "feedReliability": "A - Completely reliable", + "feedReputation": "None", + "feedTags": None, + "insecure": True, + "proxy": False, + "tlp_color": None, + "url": "https://faf.bambenekconsulting.com/", + }, + ) + mocker.patch.object(demisto, "command", return_value="fetch-indicators") + create_indicators_mocker = mocker.patch.object(demisto, "createIndicators") + API_CLIENT_MOCK = MagicMock() + API_CLIENT_MOCK.build_iterator.return_value = [ + { + "https://faf.bambenekconsulting.com/feeds/dga/c2-ipmasterlist.txt": { + "result": csv_reader, + "no_update": False, + } + } + ] + mocker.patch("CSVFeedApiModule.Client", return_value=API_CLIENT_MOCK) + main() + assert ( + create_indicators_mocker.call_args.args[0][0]["rawJSON"]["value"] + == "23.82.12.29" + ) + assert ( + 
create_indicators_mocker.call_args.args[0][0]["rawJSON"]["description"] + == "IP used by beebone C&C" + ) + assert ( + create_indicators_mocker.call_args.args[0][0]["rawJSON"]["date_created"] + == "2023-12-18 08:06" + ) + assert ( + create_indicators_mocker.call_args.args[0][0]["rawJSON"]["info"] + == "http://osint.bambenekconsulting.com/manual/beebone.txt" + ) diff --git a/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/README.md b/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/README.md index bc45b0681220..542c51ff9cbe 100644 --- a/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/README.md +++ b/Packs/FeedBambenekConsulting/Integrations/FeedBambenekConsulting/README.md @@ -1,10 +1,12 @@ ## Overview + --- Use the Bambenek Consulting feed integration to fetch indicators from the feed. ## Configure Bambenek Consulting Feed on Cortex XSOAR + --- 1. Navigate to __Settings__ > __Integrations__ > __Servers & Services__. @@ -18,16 +20,18 @@ Use the Bambenek Consulting feed integration to fetch indicators from the feed. * High-Confidence C2 Domain Feed - Master Feed of known, active, and non-sinkholed C&Cs domain names (high-confidence only). * C2 All Indicator Feed - Master list feed of all current C&C domains using DGAs. * High-Confidence C2 All Indicator Feed - Master list feed of all current C&C domains using DGAs (high-confidence only). - * DGA Domain Feed - Domain feed of known DGA domains from -2 to +3 days. - * High-Confidence DGA Domain Feed - Domain feed of known DGA domains from -2 to +3 days (high-confidence only). - * Sinkhole Feed - Manually curated list of IPs known to be sinkholes, provided by Bambenek Consulting. Sinkholing is a technique where security researchers or security companies take over network infrastructure used by malware. - * **Username + Password** - Credentials to access services that require basic authentication. + * DGA Domain Feed - A self-curating feed that monitors malicious networks to observe current criminal activity. All domains are actionable. Live data of between 750 and 1,500 domains, which are used by 65 malware families and nearly 1 million domains. Limited to current relevance. + * High-Confidence DGA Domain Feed - A self-curating feed that monitors malicious networks to observe current criminal activity. All domains are actionable. Live data of between 750 and 1,500 domains, which are used by 65 malware families and nearly 1 million domains. Limited to current relevance. High-confidence data, extremely low false-positives. + * Sinkhole Feed - A manually-curated list of over 1,500 known sinkholes. The feed is used to capture traffic headed toward criminal destinations. Catch traffic headed toward them, and you know you have an infected machine. + * Malware Domains Feed - A feed based on machine learning and analytic methods of DNS telemetry developed in Bambenek Labs. Identifies malware hostnames used primarily for criminal purposes. Data is extremely safe to use to proactively protect networks. + * Phishing Domains Feed - A feed based on machine learning and analytic methods of DNS telemetry developed in Bambenek Labs. Identifies phishing hostnames used primarily for criminal purposes. Data is extremely safe to use to proactively protect networks. + * __Username + Password__ - Credentials to access services that require basic authentication. These fields also support the use of API key headers.
To use API key headers, specify the header name and value in the following format: - `_header:` in the **Username** field and the header value in the **Password** field. + `_header:` in the __Username__ field and the header value in the __Password__ field. * __Fetch indicators__: boolean flag. If set to true will fetch indicators. * __Fetch Interval__: Interval of the fetches. * __Reliability__: Reliability of the feed. - * __Traffic Light Protocol color__: The Traffic Light Protocol (TLP) designation to apply to indicators fetched from the feed. More information about the protocol can be found at https://us-cert.cisa.gov/tlp + * __Traffic Light Protocol color__: The Traffic Light Protocol (TLP) designation to apply to indicators fetched from the feed. More information about the protocol can be found at https://us-cert.cisa.gov/tlp * __Skip Exclusion List__: When selected, the exclusion list is ignored for indicators from this feed. This means that if an indicator from this feed is on the exclusion list, the indicator might still be added to the system. @@ -36,21 +40,6 @@ Use the Bambenek Consulting feed integration to fetch indicators from the feed. * __Request Timeout__: Timeout of the polling request in seconds. 4. Click __Test__ to validate the URLs, token, and connection. -## Troubleshooting --- -Bambenek Consulting has two license types: Commercial and Non-Commercial, each of which have specific feeds available. - -List of commercial feeds: -* DGA Domain Feed -* High-Confidence DGA Domain Feed -* C2 All Indicator Feed -* High-Confidence C2 All Indicator Feed -* Sinkhole Feed - -List of non-commercial feeds: -* C2 IP Feed -* High-Confidence C2 IP Feed -* C2 Domain Feed -* High-Confidence C2 Domain Feed - -For more information visit [Bambenek Consulting Feeds](https://osint.bambenekconsulting.com/feeds/) +## Gain Access + +Get a quote and subscribe: sales@bambenekconsulting.com \ No newline at end of file diff --git a/Packs/FeedBambenekConsulting/ReleaseNotes/1_2_0.md b/Packs/FeedBambenekConsulting/ReleaseNotes/1_2_0.md new file mode 100644 index 000000000000..83fb80de9887 --- /dev/null +++ b/Packs/FeedBambenekConsulting/ReleaseNotes/1_2_0.md @@ -0,0 +1,8 @@ + +#### Integrations + +##### Bambenek Consulting Feed + +- Added support for **Malware Domains** Feed. +- Added support for **Phishing Domains** Feed. +- Updated the Docker image to: *demisto/python3:3.10.13.86272*.
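The `feed_url_to_config` mapping added to `FeedBambenekConsulting.py` earlier in this diff follows the CSVFeedApiModule convention: a mapping value that is a plain string copies a CSV column into an indicator field as-is, while a `(column, regex, format)` tuple first extracts a regex group and/or formats the value. A minimal sketch of that convention (the `apply_mapping` helper is hypothetical; the real logic lives inside CSVFeedApiModule), using the `malwarefamily` rule from the new Malware Domains entry and the `description` rule from the Sinkhole entry:

```python
import re


def apply_mapping(row: dict, mapping: dict) -> dict:
    """Illustrative only: resolve field mappings the way the config above expects."""
    out = {}
    for field, rule in mapping.items():
        if isinstance(rule, str):  # plain column copy, e.g. "ipaddress": "ip"
            out[field] = row.get(rule)
        else:  # (column, regex, format) tuple
            column, regex, fmt = rule
            value = row.get(column) or ''
            if regex and (match := re.match(regex, value)):
                value = match.group(1)
            if fmt:
                value = fmt.format(value)
            out[field] = value
    return out


print(apply_mapping(
    {'description': 'Domain used by bamital DGA malware'},
    {'malwarefamily': ('description', r'.*used\s+by\s(.*?)\s', None)},
))  # {'malwarefamily': 'bamital'}
print(apply_mapping(
    {'owner': 'Bambenek Consulting'},
    {'description': ('owner', None, 'Sinkholed by {}')},
))  # {'description': 'Sinkholed by Bambenek Consulting'}
```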
diff --git a/Packs/FeedBambenekConsulting/TestPlaybooks/playbook-FeedBambenekConsulting.yml b/Packs/FeedBambenekConsulting/TestPlaybooks/playbook-FeedBambenekConsulting.yml index e19e72eda19e..0ecc56821d8f 100644 --- a/Packs/FeedBambenekConsulting/TestPlaybooks/playbook-FeedBambenekConsulting.yml +++ b/Packs/FeedBambenekConsulting/TestPlaybooks/playbook-FeedBambenekConsulting.yml @@ -15,6 +15,7 @@ tasks: name: "" iscommand: false brand: "" + description: '' nexttasks: '#none#': - "4" @@ -188,3 +189,4 @@ inputs: [] outputs: [] sourceplaybookid: BambenekConsultingFeed_Test fromversion: 5.5.0 +description: '' diff --git a/Packs/FeedBambenekConsulting/pack_metadata.json b/Packs/FeedBambenekConsulting/pack_metadata.json index d0fd33e68325..53dbafb288c7 100644 --- a/Packs/FeedBambenekConsulting/pack_metadata.json +++ b/Packs/FeedBambenekConsulting/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Bambenek Consulting Feed", "description": "Indicators feed from Bambenek Consulting", "support": "xsoar", - "currentVersion": "1.1.27", + "currentVersion": "1.2.0", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.yml b/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.yml index 8309bf623f14..b12052bffd17 100644 --- a/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.yml +++ b/Packs/MicrosoftGraphMail/Integrations/MicrosoftGraphMail/MicrosoftGraphMail.yml @@ -1141,7 +1141,7 @@ script: required: true description: Delete a specific email rule by ID for a user's mailbox using Microsoft Graph API. name: msgraph-mail-delete-rule - dockerimage: demisto/crypto:1.0.0.83343 + dockerimage: demisto/crypto:1.0.0.86361 isfetch: true runonce: false script: '-' diff --git a/Packs/MicrosoftGraphMail/ReleaseNotes/1_6_3.md b/Packs/MicrosoftGraphMail/ReleaseNotes/1_6_3.md new file mode 100644 index 000000000000..b30ae1fda5f4 --- /dev/null +++ b/Packs/MicrosoftGraphMail/ReleaseNotes/1_6_3.md @@ -0,0 +1,3 @@ +#### Integrations +##### O365 Outlook Mail (Using Graph API) +- Updated the Docker image to: *demisto/crypto:1.0.0.86361*. diff --git a/Packs/MicrosoftGraphMail/pack_metadata.json b/Packs/MicrosoftGraphMail/pack_metadata.json index 60e4e2968884..34c3c8de8611 100644 --- a/Packs/MicrosoftGraphMail/pack_metadata.json +++ b/Packs/MicrosoftGraphMail/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Microsoft Graph Mail", "description": "Microsoft Graph lets your app get authorized access to a user's Outlook mail data in a personal or organization account.", "support": "xsoar", - "currentVersion": "1.6.2", + "currentVersion": "1.6.3", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/WebFileRepository/Integrations/WebFileRepository/README.md b/Packs/WebFileRepository/Integrations/WebFileRepository/README.md index 978b95207450..5167314b2a96 100644 --- a/Packs/WebFileRepository/Integrations/WebFileRepository/README.md +++ b/Packs/WebFileRepository/Integrations/WebFileRepository/README.md @@ -247,6 +247,35 @@ Download a file from the repository There is no context output for this command. +### wfr-download-as-text + +*** +Retrieve the file data from the repository into the context. + + +#### Base Command + +`wfr-download-as-text` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| path | The file path. 
| Required | +| encoding | Encoding type to convert the file data when setting to the context. Default is utf-8. | Optional | + + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| WebFileRepository.Files.Name | string | The file name | +| WebFileRepository.Files.Path | string | The file path | +| WebFileRepository.Files.Size | number | The file size in bytes | +| WebFileRepository.Files.Data | string | The file data encoded in the encoding | +| WebFileRepository.Files.Encoding | string | The encoding name | + + ### wfr-archive-zip *** diff --git a/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository.py b/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository.py index 6aaf7771b0b2..8828cb25c592 100644 --- a/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository.py +++ b/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository.py @@ -20,7 +20,7 @@ from enum import Enum from tempfile import NamedTemporaryFile from typing import (IO, Any) -from collections.abc import Callable, Generator +from collections.abc import Callable, Iterator import bottle from bottle import BaseRequest, HTTPResponse @@ -2917,17 +2917,23 @@ class Settings: @staticmethod - def parse_attachment_exts(text: str) -> set[str]: + def parse_attachment_exts( + text: str + ) -> set[str]: """ Parse a text to build a attachment extentions. :param text: A attachment extentions configuration :return: A set of extentions. """ - return {ext if ext == '*' or ext.startswith('.') else f'.{ext}' - for ext in text.replace(',', ' ').split()} + return { + ext if ext == '*' or ext.startswith('.') else f'.{ext}' + for ext in text.replace(',', ' ').split() + } @staticmethod - def parse_mime_types(text: str) -> dict[str, str]: + def parse_mime_types( + text: str + ) -> dict[str, str]: """ Parse a text to build a mime type mapping to extensions :param text: A mapping configuration @@ -2948,7 +2954,9 @@ def parse_mime_types(text: str) -> dict[str, str]: return mapping @staticmethod - def parse_human_size(size: str) -> int | None: + def parse_human_size( + size: str + ) -> int | None: """ Parse a human readable size string :return: Size in bytes @@ -2960,7 +2968,10 @@ def parse_human_size(size: str) -> int | None: UNITS = {None: 1, 'B': 1, 'KB': 2**10, 'MB': 2**20, 'GB': 2**30, 'TB': 2**40} return int(float(num) * UNITS[unit]) - def __init__(self, params: dict[str, Any]): + def __init__( + self, + params: dict[str, Any] + ) -> None: max_storage_size_str = params.get('maxStorageSize') or '100 MB' if (max_storage_size := Settings.parse_human_size(max_storage_size_str)) is None: raise DemistoException('Invalid max storage size') @@ -2981,10 +2992,12 @@ def __init__(self, params: dict[str, Any]): self.__public_read_access = argToBoolean(params.get('publicReadAccess', 'true')) storage_protection = params.get('storageProtection') or 'read/write' - self.__storage_protection = {'read/write': STORAGE_PROTECTION.READ_WRITE, - 'read-only': STORAGE_PROTECTION.READ_ONLY, - 'sandbox': STORAGE_PROTECTION.SANDBOX, - }.get(storage_protection) + self.__storage_protection = { + 'read/write': STORAGE_PROTECTION.READ_WRITE, + 'read-only': STORAGE_PROTECTION.READ_ONLY, + 'sandbox': STORAGE_PROTECTION.SANDBOX, + }.get(storage_protection) + if self.__storage_protection is None: raise DemistoException(f'Invalid storage protection mode: {storage_protection}') @@ -3001,7 +3014,10 @@ def __init__(self, params: dict[str, Any]): self.__ro_username = 
creds.get('identifier') or '' self.__ro_password = creds.get('password') or '' - def get_user_password(self, username: str | None) -> str | None: + def get_user_password( + self, + username: str | None + ) -> str | None: if username == self.__rw_username: return self.__rw_password elif username == self.__ro_username: @@ -3009,7 +3025,10 @@ def get_user_password(self, username: str | None) -> str | None: else: return None - def get_user_permissions(self, username: str | None) -> set[PERMISSION]: + def get_user_permissions( + self, + username: str | None + ) -> set[PERMISSION]: if username == self.__rw_username: return set({PERMISSION.READ, PERMISSION.WRITE}) elif username == self.__ro_username: @@ -3017,7 +3036,10 @@ def get_user_permissions(self, username: str | None) -> set[PERMISSION]: else: return set() - def get_content_type_from_file_extension(self, ext: str) -> str: + def get_content_type_from_file_extension( + self, + ext: str + ) -> str: if content_type := self.__ext_to_mimetype.get(ext): return content_type else: @@ -3026,59 +3048,88 @@ def get_content_type_from_file_extension(self, ext: str) -> str: return content_type return 'application/octet-stream' - def is_attachment_file_extension(self, ext: str) -> bool: - return ext in self.__attachment_exts or \ - any(fnmatch.fnmatch(ext, pattern) for pattern in self.__attachment_exts) + def is_attachment_file_extension( + self, + ext: str + ) -> bool: + return ( + ext in self.__attachment_exts + + or any(fnmatch.fnmatch(ext, pattern) for pattern in self.__attachment_exts) + ) @property - def host_port(self) -> int: + def host_port( + self + ) -> int: return self.__host_port @property - def docker_port(self) -> int: + def docker_port( + self + ) -> int: return self.__docker_port @property - def attachment_exts(self) -> set[str]: + def attachment_exts( + self + ) -> set[str]: return self.__attachment_exts @property - def ext_to_mimetype(self) -> dict[str, str]: + def ext_to_mimetype( + self + ) -> dict[str, str]: return self.__ext_to_mimetype @property - def max_storage_size(self) -> int: + def max_storage_size( + self + ) -> int: return self.__max_storage_size @property - def max_sandbox_size(self) -> int: + def max_sandbox_size( + self + ) -> int: return self.__max_sandbox_size @property - def public_read_access(self) -> bool: + def public_read_access( + self + ) -> bool: return self.__public_read_access @property - def storage_protection(self) -> STORAGE_PROTECTION: + def storage_protection( + self + ) -> STORAGE_PROTECTION: return self.__storage_protection # type: ignore @property - def auth_method(self) -> str | None: + def auth_method( + self + ) -> str | None: return self.__auth_method @property - def rw_user_credentials(self) -> tuple[str, str]: + def rw_user_credentials( + self + ) -> tuple[str, str]: return self.__rw_username, self.__rw_password @property - def ro_user_credentials(self) -> tuple[str, str]: + def ro_user_credentials( + self + ) -> tuple[str, str]: return self.__ro_username, self.__ro_password SETTINGS = Settings(demisto.params()) -def get_default_gateway() -> str | None: +def get_default_gateway( +) -> str | None: """ Get a default gateway address. :return: A default gateway address found. @@ -3092,7 +3143,8 @@ def get_default_gateway() -> str | None: return None -def get_local_ip() -> str: +def get_local_ip( +) -> str: """ Get an external IP address. 
NOTE: https://stackoverflow.com/questions/166506/finding-local-ip-addresses-using-pythons-stdlib @@ -3109,7 +3161,9 @@ def get_local_ip() -> str: return ip -def detect_service_ip_port(settings: Settings) -> tuple[str, int]: +def detect_service_ip_port( + settings: Settings +) -> tuple[str, int]: """ Detect the IP:port of the local server :param settings: The instance settings. @@ -3129,7 +3183,10 @@ def detect_service_ip_port(settings: Settings) -> tuple[str, int]: return server_addr, server_port -def new_client(host_port: tuple[str, int], settings: Settings) -> BaseClient: +def new_client( + host_port: tuple[str, int], + settings: Settings +) -> BaseClient: """ Create a new BasicClient :param host_port: The IP and port number @@ -3152,24 +3209,33 @@ def new_client(host_port: tuple[str, int], settings: Settings) -> BaseClient: return BaseClient(f'http://{server_addr}:{server_port}', auth=auth) -def to_abs_path(path: str) -> str: +def to_abs_path( + path: str +) -> str: return path if path.startswith(os.sep) else os.sep + path -def pretty_size(size: int) -> str: +def pretty_size( + size: int +) -> str: units = ['bytes', 'KB', 'MB', 'GB', 'TB'] i = min(int(math.log(size or 1, 1024)), len(units) - 1) return f'{size / 1024 ** i:.{max(min(1, i), 0)}f} {units[i]}' class NonceManager: - def __init__(self): + def __init__( + self + ) -> None: self.__cache: dict[str, dict[str, Any]] = {} self.__expires = 10 self.__max_replays = 20 self.__max_nonce = 4096 - def __remove_expired_oldest(self, now: int | None = None) -> bool: + def __remove_expired_oldest( + self, + now: int | None = None + ) -> bool: """ Remove the expired oldest nonce from the cache :param now: The current timestamp @@ -3192,7 +3258,9 @@ def __remove_expired_oldest(self, now: int | None = None) -> bool: else: return False - def __new_nonce(self) -> tuple[int, str]: + def __new_nonce( + self + ) -> tuple[int, str]: """ Create a new nonce :return: The current timestamp and a new nonce. @@ -3201,7 +3269,10 @@ def __new_nonce(self) -> tuple[int, str]: nonce = str(now) + ':' + os.urandom(16).hex() return now, nonce - def validate_nonce(self, nonce: str) -> VALIDATION: + def validate_nonce( + self, + nonce: str + ) -> VALIDATION: """ Check if the nonce is valid. :param nonce: A nonce to validate. @@ -3223,7 +3294,9 @@ def validate_nonce(self, nonce: str) -> VALIDATION: } return VALIDATION.SUCCESS - def gen_nonce(self) -> str: + def gen_nonce( + self + ) -> str: """ Generate a new nonce :return: A new nonce generated. @@ -3244,7 +3317,10 @@ def gen_nonce(self) -> str: class Master: - def __init__(self, storage_protection: STORAGE_PROTECTION): + def __init__( + self, + storage_protection: STORAGE_PROTECTION + ) -> None: """ Initialize the master DB manager :param storage_protection: The storage protection mode @@ -3254,17 +3330,23 @@ def __init__(self, storage_protection: STORAGE_PROTECTION): self.__total_data_usage = None @property - def storage_protection(self) -> STORAGE_PROTECTION: + def storage_protection( + self + ) -> STORAGE_PROTECTION: return self.__storage_protection - def reset(self) -> None: + def reset( + self + ) -> None: """ Wipe the repository on the normal mode, and Restore the repository on the sandbox mode. """ self.__repo = None if self.storage_protection == STORAGE_PROTECTION.READ_WRITE: set_integration_context({}) - def get_full_repository(self) -> dict[str, str]: + def get_full_repository( + self + ) -> dict[str, str]: """ Get the full context data from the integration context :return: The integration context. 
@@ -3280,7 +3362,9 @@ def get_full_repository(self) -> dict[str, str]: return ctx - def get_attrs_repository(self) -> dict[str, str]: + def get_attrs_repository( + self + ) -> dict[str, str]: """ Get the file entries without payloads from the integration context. :return: The integration context without file payloads. @@ -3291,7 +3375,10 @@ def get_attrs_repository(self) -> dict[str, str]: else: return self.get_full_repository() - def set_full_repository(self, repo: dict[str, str]) -> None: + def set_full_repository( + self, + repo: dict[str, str] + ) -> None: """ Set the full context data to the integration context. :param repo: The integration context. @@ -3302,7 +3389,9 @@ def set_full_repository(self, repo: dict[str, str]) -> None: self.__repo = {k: v for k, v in repo.items() if k.startswith(os.sep)} set_integration_context(repo) - def total_data_usage(self) -> tuple[int, int]: + def total_data_usage( + self + ) -> tuple[int, int]: """ Get the data usage :return: The sum of all the saved sizes in the DB / on the file system. @@ -3322,8 +3411,11 @@ def total_data_usage(self) -> tuple[int, int]: else: data_usage += int(attrs.get('saved-size') or 0) - if self.__total_data_usage is None or \ - self.storage_protection != STORAGE_PROTECTION.SANDBOX: + if ( + self.__total_data_usage is None + + or self.storage_protection != STORAGE_PROTECTION.SANDBOX + ): self.__total_data_usage = data_usage # type: ignore return data_usage, file_usage else: @@ -3334,11 +3426,16 @@ def total_data_usage(self) -> tuple[int, int]: class AttrsRepository: - def __init__(self, repo: dict[str, str]): + def __init__( + self, + repo: dict[str, str] + ) -> None: self.repo = repo @staticmethod - def __split_path_components(abs_path: str) -> list[str]: + def __split_path_components( + abs_path: str + ) -> list[str]: comps = [] path = os.path.normpath(to_abs_path(abs_path)) while path: @@ -3349,12 +3446,17 @@ def __split_path_components(abs_path: str) -> list[str]: path = parent return list(reversed(comps[:-1])) - def is_file_type(self, data_type: str | None) -> bool: + def is_file_type( + self, + data_type: str | None + ) -> bool: return data_type == 'gzip-file' - def list_file_entries(self, - abs_dir: str, - recursive: bool = False) -> dict[str, dict[str, Any]]: + def list_file_entries( + self, + abs_dir: str, + recursive: bool = False + ) -> dict[str, dict[str, Any]]: """ List the file entries on a directory :param abs_dir: The directory path in absolute path on which to list file entries @@ -3405,7 +3507,10 @@ def list_file_entries(self, class FullRepository(AttrsRepository): @staticmethod - def new_decoder(data_type: str | None, data: str) -> Generator[bytes, None, None]: + def new_decoder( + data_type: str | None, + data: str + ) -> Iterator[bytes]: """ Decode a file content in chunks :param data_type: The encoding mode of the payload. @@ -3423,7 +3528,10 @@ def new_decoder(data_type: str | None, data: str) -> Generator[bytes, None, None raise DemistoException(f'Unknown data type: {data_type}') @staticmethod - def new_reader(data_type: str | None, path: str) -> Generator[bytes, None, None]: + def new_reader( + data_type: str | None, + path: str + ) -> Iterator[bytes]: """ Read a file content in chunks :param data_type: The file type. 
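Both `new_decoder` and `new_reader` are chunked generators, so a large payload can be streamed out without ever being held in memory in full; the switch from `Generator[bytes, None, None]` to `Iterator[bytes]` above is purely cosmetic, since a generator that neither receives nor returns values is just an iterator. The decoder body for the `'gzip-file'` data type falls outside this hunk; a rough sketch of how such a chunked decode can work, assuming the stored payload is a base64 string wrapping gzip-compressed bytes (an assumption, the storage format is not shown here):

```python
import base64
import zlib
from collections.abc import Iterator


def decode_gzip_b64(data: str, chunk_size: int = 4096) -> Iterator[bytes]:
    """Yield the decompressed payload chunk by chunk (illustrative sketch)."""
    # wbits=16+MAX_WBITS makes zlib expect a gzip header and trailer.
    decomp = zlib.decompressobj(16 + zlib.MAX_WBITS)
    # Base64 decodes cleanly only in slices whose length is a multiple of 4.
    step = chunk_size * 4
    for i in range(0, len(data), step):
        if chunk := decomp.decompress(base64.b64decode(data[i:i + step])):
            yield chunk
    if tail := decomp.flush():
        yield tail
```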
@@ -3436,7 +3544,12 @@ def new_reader(data_type: str | None, path: str) -> Generator[bytes, None, None] else: raise DemistoException(f'Unknown data type: {data_type}') - def __init__(self, repo: Master, data_usage_limit: int = 0, file_usage_limit: int = 0): + def __init__( + self, + repo: Master, + data_usage_limit: int = 0, + file_usage_limit: int = 0 + ) -> None: """ Initialize the instance. :param repo: The repository @@ -3449,7 +3562,9 @@ def __init__(self, repo: Master, data_usage_limit: int = 0, file_usage_limit: in self.__file_usage_limit = file_usage_limit self.__total_data_usage, self.__total_file_usage = repo.total_data_usage() - def remove_orphan_entries(self) -> None: + def remove_orphan_entries( + self + ) -> None: """ Remove unlinked data entries """ repo = self.repo @@ -3466,7 +3581,10 @@ def remove_orphan_entries(self) -> None: for data_uuid in (data_uuids - keep_uuids): repo.pop(data_uuid, None) - def remove_entry(self, abs_path: str) -> None: + def remove_entry( + self, + abs_path: str + ) -> None: """ Remove the file/directory entry :param abs_path: The path in absolute path @@ -3496,7 +3614,12 @@ def remove_entry(self, abs_path: str) -> None: os.unlink(path) self.__total_file_usage -= attrs.get('saved-size') or 0 - def save_file(self, abs_dir: str, name: str, data: IO[bytes]) -> dict[str, Any]: + def save_file( + self, + abs_dir: str, + name: str, + data: IO[bytes] + ) -> dict[str, Any]: """ Save a file :param abs_dir: The directory path in absolute path @@ -3576,7 +3699,12 @@ def save_file(self, abs_dir: str, name: str, data: IO[bytes]) -> dict[str, Any]: os.unlink(gtmp.name) raise - def save_files(self, abs_dir: str, files: dict[str, IO[bytes]], extract: bool) -> None: + def save_files( + self, + abs_dir: str, + files: dict[str, IO[bytes]], + extract: bool + ) -> None: """ Save files :param abs_dir: The directory path in absolute path @@ -3602,8 +3730,10 @@ def save_files(self, abs_dir: str, files: dict[str, IO[bytes]], extract: bool) - else: self.save_file(abs_dir, name, file) - def read_file(self, abs_path: str) -> tuple[dict[str, Any], - Generator[bytes, None, None] | None]: + def read_file( + self, + abs_path: str + ) -> tuple[dict[str, Any], Iterator[bytes] | None]: """ Read a file content with its attributes :param abs_path: The file path @@ -3622,7 +3752,9 @@ def read_file(self, abs_path: str) -> tuple[dict[str, Any], return attrs, FullRepository.new_decoder(data_type, data) return {}, None - def archive_zip(self) -> Generator[bytes, None, None]: + def archive_zip( + self + ) -> Iterator[bytes]: """ Build a zip stream in chunks by archiving all the files """ repo = self.repo @@ -3653,7 +3785,9 @@ def archive_zip(self) -> Generator[bytes, None, None]: while chunk := ztmp.read(4096): yield chunk - def commit(self): + def commit( + self + ) -> None: """ Write the cache modified by transactions to the master Note: In the copy-on-write mode, the master is not modified. 
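`archive_zip` applies the same streaming idea to the whole repository: the files are written into a zip on a temporary file, which is then read back and yielded in 4096-byte chunks (the `while chunk := ztmp.read(4096): yield chunk` loop visible in the hunk), so the response body never requires the full archive in memory. A self-contained sketch of the pattern, with a hypothetical `files` dict standing in for the repository contents:

```python
import zipfile
from collections.abc import Iterator
from tempfile import NamedTemporaryFile


def archive_zip_sketch(files: dict[str, bytes]) -> Iterator[bytes]:
    """Zip `files` onto a temp file, then stream the archive in 4 KB chunks."""
    with NamedTemporaryFile() as ztmp:
        with zipfile.ZipFile(ztmp, 'w', zipfile.ZIP_DEFLATED) as zf:
            for name, payload in files.items():
                zf.writestr(name.lstrip('/'), payload)
        ztmp.seek(0)
        while chunk := ztmp.read(4096):
            yield chunk


total = sum(len(c) for c in archive_zip_sketch({'/a.dat': b'abc', '/b.dat': b'xyz'}))
print(total)  # size of the generated zip in bytes
```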
@@ -3662,7 +3796,9 @@ def commit(self): @bottle.error(404) -def error404(error): +def error404( + error +) -> str: return ''' @@ -3679,11 +3815,18 @@ def error404(error): class ServiceHandler: - def __init__(self, settings: Settings, master: Master): + def __init__( + self, + settings: Settings, + master: Master + ) -> None: self.__settings = settings self.__master = master - def __validate_basic_auth(self, auth_value) -> set[PERMISSION]: + def __validate_basic_auth( + self, + auth_value: str + ) -> set[PERMISSION]: """ Checks whether the authentication is valid :param auth_value: Credentials given to the Authentication header @@ -3694,11 +3837,13 @@ def __validate_basic_auth(self, auth_value) -> set[PERMISSION]: return self.__settings.get_user_permissions(username) return set() - def __validate_digest_auth(self, - auth_value: str, - request_method: str, - realm: str, - hash_name: tuple[str, str]) -> tuple[VALIDATION, set[PERMISSION]]: + def __validate_digest_auth( + self, + auth_value: str, + request_method: str, + realm: str, + hash_name: tuple[str, str] + ) -> tuple[VALIDATION, set[PERMISSION]]: """ Checks whether the authentication is valid :param auth_value: Credentials given to the Authentication header @@ -3714,26 +3859,49 @@ def __validate_digest_auth(self, username = username or '' hhash_name, phash_name = hash_name - if params.get('algorithm', 'MD5').upper() != phash_name or\ - not (qnonce := params.get('nonce')) or\ - not (quri := params.get('uri')) or\ - not (qresponse := params.get('response')) or\ - not (qcnonce := params.get('cnonce')) or\ - not (qnc := params.get('nc')): + if ( + params.get('algorithm', 'MD5').upper() != phash_name + + or not (qnonce := params.get('nonce')) + + or not (quri := params.get('uri')) + + or not (qresponse := params.get('response')) + + or not (qcnonce := params.get('cnonce')) + + or not (qnc := params.get('nc')) + ): return VALIDATION.FAILURE, set() if (result := NONCE_MANAGER.validate_nonce(qnonce)) != VALIDATION.SUCCESS: return result, set() - a1 = hashlib.new(hhash_name, username.encode() + b':' + realm.encode() + b':' + password.encode()).hexdigest() - a2 = hashlib.new(hhash_name, f'{request_method}:{quri}'.encode()).hexdigest() - oresponse = hashlib.new(hhash_name, f'{a1}:{qnonce}:{qnc}:{qcnonce}:auth:{a2}'.encode()).hexdigest() + a1 = hashlib.new( + hhash_name, + username.encode() + b':' + realm.encode() + b':' + password.encode() + ).hexdigest() + + a2 = hashlib.new( + hhash_name, + f'{request_method}:{quri}'.encode() + ).hexdigest() + + oresponse = hashlib.new( + hhash_name, + f'{a1}:{qnonce}:{qnc}:{qcnonce}:auth:{a2}'.encode() + ).hexdigest() + if qresponse == oresponse: return VALIDATION.SUCCESS, self.__settings.get_user_permissions(username) else: return VALIDATION.FAILURE, set() - def authenticate(self, request: BaseRequest, permission: PERMISSION) -> HTTPResponse | None: + def authenticate( + self, + request: BaseRequest, + permission: PERMISSION + ) -> HTTPResponse | None: """ Authenticate user to the required permission :param request: The request data @@ -3758,43 +3926,48 @@ def authenticate(self, request: BaseRequest, permission: PERMISSION) -> HTTPResp response = HTTPResponse() response.status = 401 - if required_auth_method == 'Basic': - """ - Basic Auth - """ - if qauth_method == 'Basic' and \ - permission in self.__validate_basic_auth(qauth_value): - return None - - response.set_header('WWW-Authenticate', f'Basic realm="{realm}"') - elif required_auth_method in ('Digest-md5', 'Digest-sha256'): - """ - Digest Auth - """ - _, 
_, hhash_name = required_auth_method.partition('-') - rhash_name = { - 'md5': 'MD5', - 'sha256': 'SHA-256', - }[hhash_name] - - result = VALIDATION.FAILURE - if qauth_method == 'Digest': - result, permissions = self.__validate_digest_auth(qauth_value, - request.method, - realm, - (hhash_name, rhash_name)) - if result == VALIDATION.SUCCESS and permission in permissions: + match required_auth_method: + case 'Basic': + """ + Basic Auth + """ + if qauth_method == 'Basic' and \ + permission in self.__validate_basic_auth(qauth_value): return None - nonce = NONCE_MANAGER.gen_nonce() - auth_value = f'Digest realm="{realm}", nonce="{nonce}", algorithm={rhash_name}, qop=auth' - if result == VALIDATION.NONCE_EXPIRED: - auth_value += ', stale=true' - response.set_header('WWW-Authenticate', auth_value) + response.set_header('WWW-Authenticate', f'Basic realm="{realm}"') + + case 'Digest-md5' | 'Digest-sha256': + """ + Digest Auth + """ + _, _, hhash_name = required_auth_method.partition('-') + rhash_name = { + 'md5': 'MD5', + 'sha256': 'SHA-256', + }[hhash_name] + + result = VALIDATION.FAILURE + if qauth_method == 'Digest': + result, permissions = self.__validate_digest_auth(qauth_value, + request.method, + realm, + (hhash_name, rhash_name)) + if result == VALIDATION.SUCCESS and permission in permissions: + return None + + nonce = NONCE_MANAGER.gen_nonce() + auth_value = f'Digest realm="{realm}", nonce="{nonce}", algorithm={rhash_name}, qop=auth' + if result == VALIDATION.NONCE_EXPIRED: + auth_value += ', stale=true' + response.set_header('WWW-Authenticate', auth_value) return response - def __handle_get_resource(self, request: BaseRequest) -> HTTPResponse: + def __handle_get_resource( + self, + request: BaseRequest + ) -> HTTPResponse: global RESOURCES_ZIP if RESOURCES_ZIP is None: RESOURCES_ZIP = zipfile.ZipFile(io.BytesIO(base64.b64decode(RESOURCES_ZIP_B64)), 'r') @@ -3814,7 +3987,9 @@ def __handle_get_resource(self, request: BaseRequest) -> HTTPResponse: response.body = RESOURCES_ZIP.read(request.query.name) return response - def __handle_get_status(self) -> HTTPResponse: + def __handle_get_status( + self + ) -> HTTPResponse: data_usage, file_usage = self.__master.total_data_usage() storage_protection = { @@ -3837,7 +4012,10 @@ def __handle_get_status(self) -> HTTPResponse: response.body = resp return response - def __handle_get_ls(self, request: BaseRequest) -> HTTPResponse: + def __handle_get_ls( + self, + request: BaseRequest + ) -> HTTPResponse: dirpath = to_abs_path(request.query.dir) recursive = argToBoolean(request.query.recursive or 'false') repo = AttrsRepository(self.__master.get_attrs_repository()) @@ -3849,7 +4027,10 @@ def __handle_get_ls(self, request: BaseRequest) -> HTTPResponse: } return response - def __handle_get_download(self, request: BaseRequest) -> HTTPResponse: + def __handle_get_download( + self, + request: BaseRequest + ) -> HTTPResponse: path = to_abs_path(request.query.path) attrs, chunks = FullRepository(self.__master).read_file(to_abs_path(path)) if chunks is None: @@ -3863,7 +4044,9 @@ def __handle_get_download(self, request: BaseRequest) -> HTTPResponse: response.body = chunks return response - def __handle_get_archive_all(self) -> HTTPResponse: + def __handle_get_archive_all( + self + ) -> HTTPResponse: filename = datetime.now(timezone.utc).strftime('archive-%Y%m%d%H%M%S.zip') response = HTTPResponse() @@ -3872,26 +4055,36 @@ def __handle_get_archive_all(self) -> HTTPResponse: response.body = FullRepository(self.__master).archive_zip() return response - def 
__handle_post_health(self, request: BaseRequest) -> HTTPResponse | None: + def __handle_post_health( + self, + request: BaseRequest + ) -> HTTPResponse | None: if permission := request.json.get('permission'): return self.authenticate( request, PERMISSION.WRITE if permission == 'write' else PERMISSION.READ) return None - def __handle_post_cleanup(self) -> None: + def __handle_post_cleanup( + self + ) -> None: if self.__master.storage_protection == STORAGE_PROTECTION.READ_ONLY: raise DemistoException('The storage is read-only mode.') self.__master.set_full_repository({}) - def __handle_post_reset(self) -> None: + def __handle_post_reset( + self + ) -> None: if self.__master.storage_protection == STORAGE_PROTECTION.READ_ONLY: raise DemistoException('The storage is read-only mode.') self.__master.reset() - def __handle_post_delete(self, request: BaseRequest) -> None: + def __handle_post_delete( + self, + request: BaseRequest + ) -> None: if self.__master.storage_protection == STORAGE_PROTECTION.READ_ONLY: raise DemistoException('The storage is read-only mode.') @@ -3902,7 +4095,10 @@ def __handle_post_delete(self, request: BaseRequest) -> None: repo.remove_orphan_entries() repo.commit() - def __handle_post_upload(self, request: BaseRequest) -> None: + def __handle_post_upload( + self, + request: BaseRequest + ) -> None: if self.__master.storage_protection == STORAGE_PROTECTION.READ_ONLY: raise DemistoException('The storage is read-only mode.') @@ -3918,7 +4114,10 @@ def __handle_post_upload(self, request: BaseRequest) -> None: repo.remove_orphan_entries() repo.commit() - def get(self, request: BaseRequest) -> HTTPResponse: + def get( + self, + request: BaseRequest + ) -> HTTPResponse: if response := self.authenticate(request, PERMISSION.READ): return response @@ -3942,7 +4141,11 @@ def get(self, request: BaseRequest) -> HTTPResponse: response.body = HTML_MAIN return response - def get_file(self, request: BaseRequest, path: str) -> HTTPResponse: + def get_file( + self, + request: BaseRequest, + path: str + ) -> HTTPResponse: if response := self.authenticate(request, PERMISSION.READ): return response @@ -3962,29 +4165,37 @@ def get_file(self, request: BaseRequest, path: str) -> HTTPResponse: response.body = chunks return response - def post(self, request: BaseRequest) -> HTTPResponse: + def post( + self, + request: BaseRequest + ) -> HTTPResponse: response = HTTPResponse() response.content_type = 'application/json' try: if request.content_type == 'application/json': q = isinstance(request.json, dict) and request.json.get('q') - if q == 'health': - if resp := self.__handle_post_health(request): - return resp - elif q == 'cleanup': - if resp := self.authenticate(request, PERMISSION.WRITE): - return resp - self.__handle_post_cleanup() - elif q == 'reset': - if resp := self.authenticate(request, PERMISSION.WRITE): - return resp - self.__handle_post_reset() - elif q == 'delete': - if resp := self.authenticate(request, PERMISSION.WRITE): - return resp - self.__handle_post_delete(request) - else: - raise DemistoException('Unknown request') + match q: + case 'health': + if resp := self.__handle_post_health(request): + return resp + + case 'cleanup': + if resp := self.authenticate(request, PERMISSION.WRITE): + return resp + self.__handle_post_cleanup() + + case 'reset': + if resp := self.authenticate(request, PERMISSION.WRITE): + return resp + self.__handle_post_reset() + + case 'delete': + if resp := self.authenticate(request, PERMISSION.WRITE): + return resp + self.__handle_post_delete(request) + + 
case _: + raise DemistoException('Unknown request') else: if request.forms.q == 'upload': if resp := self.authenticate(request, PERMISSION.WRITE): @@ -4000,29 +4211,39 @@ def post(self, request: BaseRequest) -> HTTPResponse: @bottle.route('/', method='GET') -def process_download_file(path): +def process_download_file( + path +): handler = ServiceHandler(settings=SETTINGS, master=MASTER_REPOSITORY) return handler.get_file(bottle.request, path) @bottle.route('/', method='POST') -def process_root_post(): +def process_root_post( +): handler = ServiceHandler(settings=SETTINGS, master=MASTER_REPOSITORY) return handler.post(bottle.request) @bottle.route('/', method='GET') -def process_root_get(): +def process_root_get( +): handler = ServiceHandler(settings=SETTINGS, master=MASTER_REPOSITORY) return handler.get(bottle.request) -def run_long_running(settings: Settings, is_test: bool = False): +def run_long_running( + settings: Settings, + is_test: bool = False +) -> None: if not is_test: bottle.run(host='0.0.0.0', port=settings.docker_port, debug=True) -def test_module(args: dict[str, str], settings: Settings) -> str: +def test_module( + args: dict[str, str], + settings: Settings +) -> str: """ Validates: """ @@ -4030,7 +4251,10 @@ def test_module(args: dict[str, str], settings: Settings) -> str: return 'ok' -def command_status(args: dict[str, str], settings: Settings) -> CommandResults: +def command_status( + args: dict[str, str], + settings: Settings +) -> CommandResults: """ Get the service status :param args: The parameters which were given to the command. @@ -4068,41 +4292,59 @@ def command_status(args: dict[str, str], settings: Settings) -> CommandResults: return CommandResults( outputs_prefix='WebFileRepository.Status', outputs=outputs, - readable_output=tblToMd('Service Status', readable_outputs, headers=readable_outputs.keys()), - raw_response=outputs) + readable_output=tblToMd( + 'Service Status', + readable_outputs, + headers=readable_outputs.keys() + ), + raw_response=outputs + ) -def command_cleanup(args: dict[str, str], settings: Settings) -> str: +def command_cleanup( + args: dict[str, str], + settings: Settings +) -> str: """ Remove all the files from the repository :param args: The parameters which were given to the command. :param settings: The instance settings. """ client = new_client(detect_service_ip_port(settings), settings) - resp = client._http_request('POST', - json_data={'q': 'cleanup'}, - raise_on_status=True) + resp = client._http_request( + 'POST', + json_data={'q': 'cleanup'}, + raise_on_status=True + ) if not resp.get('success'): raise ValueError(f'Failed to clean up entries: {resp.get("message")}') return 'Done.' -def command_reset(args: dict[str, str], settings: Settings) -> str: +def command_reset( + args: dict[str, str], + settings: Settings +) -> str: """ Reset the repostiory data :param args: The parameters which were given to the command. :param settings: The instance settings. """ client = new_client(detect_service_ip_port(settings), settings) - resp = client._http_request('POST', - json_data={'q': 'reset'}, - raise_on_status=True) + resp = client._http_request( + 'POST', + json_data={'q': 'reset'}, + raise_on_status=True + ) if not resp.get('success'): raise ValueError(f'Failed to reset the repository: {resp.get("message")}') return 'Done.' 
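As `command_cleanup` and `command_reset` show, the wrapper commands never touch the repository directly: each one POSTs a small JSON control message (`{'q': 'cleanup'}`, `{'q': 'reset'}`, `{'q': 'delete', 'path': [...]}`) to the integration's own long-running web server and checks the `success`/`message` fields of the JSON reply. A sketch of that round trip with plain `requests` (the address and credentials below are placeholders; the integration resolves the real ones through `detect_service_ip_port` and `new_client`):

```python
import requests

BASE_URL = 'http://192.0.2.10:8000'  # placeholder server address
AUTH = ('rw_user', 'rw_password')    # placeholder read/write credentials


def control(action: str, **extra) -> None:
    """POST a control request to the repository server and verify the reply."""
    resp = requests.post(BASE_URL, json={'q': action, **extra}, auth=AUTH, timeout=30)
    resp.raise_for_status()
    body = resp.json()
    if not body.get('success'):
        raise RuntimeError(body.get('message'))


control('cleanup')                  # remove all files
control('delete', path=['/a.dat'])  # remove specific entries
```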
-def command_upload_as_file(args: dict[str, str], settings: Settings) -> str: +def command_upload_as_file( + args: dict[str, str], + settings: Settings +) -> str: """ Upload data as a file :param args: The parameters which were given to the command. @@ -4135,7 +4377,10 @@ def command_upload_as_file(args: dict[str, str], settings: Settings) -> str: return 'Done.' -def command_upload_file(args: dict[str, str], settings: Settings) -> str: +def command_upload_file( + args: dict[str, str], + settings: Settings +) -> str: """ Upload a file :param args: The parameters which were given to the command. @@ -4157,13 +4402,21 @@ def command_upload_file(args: dict[str, str], settings: Settings) -> str: dir=args.get('upload_directory', '/'), extract=args.get('extract_archive', 'false'), ) - resp = client._http_request('POST', data=data, files=files, raise_on_status=True) + resp = client._http_request( + 'POST', + data=data, + files=files, + raise_on_status=True + ) if not resp.get('success'): raise ValueError(f'Failed to upload a file: {resp.get("message")}') return 'Done.' -def command_upload_files(args: dict[str, str], settings: Settings) -> str: +def command_upload_files( + args: dict[str, str], + settings: Settings +) -> str: """ Upload files :param args: The parameters which were given to the command. @@ -4184,22 +4437,32 @@ def command_upload_files(args: dict[str, str], settings: Settings) -> str: dir=args.get('upload_directory', '/'), extract=args.get('extract_archive', 'false'), ) - resp = client._http_request('POST', data=data, files=files, raise_on_status=True) + resp = client._http_request( + 'POST', + data=data, + files=files, + raise_on_status=True + ) if not resp.get('success'): raise ValueError(f'Failed to upload files: {resp.get("message")}') return 'Done.' -def command_list_files(args: dict[str, str], settings: Settings) -> CommandResults: +def command_list_files( + args: dict[str, str], + settings: Settings +) -> CommandResults: """ List file entries in the repository :param args: The parameters which were given to the command. :param settings: The instance settings. 
""" class __MappingValue: - def __init__(self, - readable_value: Callable[[Any], Any], - context_value: Callable[[Any], Any]): + def __init__( + self, + readable_value: Callable[[Any], Any], + context_value: Callable[[Any], Any] + ) -> None: self.readable_value = readable_value self.context_value = context_value @@ -4209,7 +4472,11 @@ def __init__(self, recursive=args.get('recursive', 'false') ) client = new_client(detect_service_ip_port(settings), settings) - resp = client._http_request('GET', params=query_params, raise_on_status=True) + resp = client._http_request( + 'GET', + params=query_params, + raise_on_status=True + ) ents = resp.get('data') if not isinstance(ents, list): raise ValueError('Failed to list file entries') @@ -4224,11 +4491,15 @@ def __init__(self, lambda x: datetime.fromtimestamp(int(x or 0), timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC'), lambda x: datetime.fromtimestamp(int(x or 0), timezone.utc).isoformat()), } - - outputs = [{camelize_string(k, '-'): v.context_value(ent[k]) for k, v in mapping.items()} for ent in file_ents] + outputs = [ + { + camelize_string(k, '-'): v.context_value(ent[k]) + for k, v in mapping.items() + } for ent in file_ents + ] return CommandResults( - outputs_prefix='WebFileRepository.Files', + outputs_prefix='WebFileRepository.Files(val.Path === obj.Path)', outputs=outputs, readable_output=tblToMd( 'File List', @@ -4236,28 +4507,37 @@ def __init__(self, headers=mapping.keys(), headerTransform=lambda x: x.replace('-', ' ').title() ), - raw_response=file_ents) + raw_response=file_ents + ) -def command_remove_files(args: dict[str, str], settings: Settings) -> str: +def command_remove_files( + args: dict[str, str], + settings: Settings +) -> str: """ Remove files from the repository :param args: The parameters which were given to the command. :param settings: The instance settings. """ client = new_client(detect_service_ip_port(settings), settings) - resp = client._http_request('POST', - json_data={ - 'q': 'delete', - 'path': argToList(args.get('paths', [])) - }, - raise_on_status=True) + resp = client._http_request( + 'POST', + json_data={ + 'q': 'delete', + 'path': argToList(args.get('paths', [])) + }, + raise_on_status=True + ) if not resp.get('success'): raise ValueError(f'Failed to remove files: {resp.get("message")}') return 'Done.' -def command_download_file(args: dict[str, str], settings: Settings) -> dict[str, Any]: +def command_download_file( + args: dict[str, str], + settings: Settings +) -> dict[str, Any]: """ Download a file from the repository :param args: The parameters which were given to the command. 
@@ -4267,35 +4547,120 @@ def command_download_file(args: dict[str, str], settings: Settings) -> dict[str, raise DemistoException('A file path is required.') client = new_client(detect_service_ip_port(settings), settings) - resp = client._http_request('GET', - params={ - 'q': 'download', - 'path': path - }, - raise_on_status=True, - resp_type='response') - - if not (filename := args.get('save_as')) and (content_disposition := resp.headers.get('Content-Disposition')): - cdp = email_parser.Parser().parsestr(f'Content-Disposition: {content_disposition}', headersonly=True) + resp = client._http_request( + 'GET', + params={ + 'q': 'download', + 'path': path + }, + raise_on_status=True, + resp_type='response' + ) + if ( + not (filename := args.get('save_as')) + + and (content_disposition := resp.headers.get('Content-Disposition')) + ): + cdp = email_parser.Parser().parsestr( + f'Content-Disposition: {content_disposition}', + headersonly=True + ) filename = cdp.get_filename() return fileResult(filename or str(uuid.uuid4()), resp.content) -def command_archive_zip(args: dict[str, str], settings: Settings) -> dict[str, Any]: - """ Archive all the files into a zip file +def command_download_as_text( + args: dict[str, str], + settings: Settings +) -> CommandResults: + """ Download a file from the repository, and set the data to the context :param args: The parameters which were given to the command. :param settings: The instance settings. """ + if not (path := args.get('path')): + raise DemistoException('A file path is required.') + client = new_client(detect_service_ip_port(settings), settings) - resp = client._http_request('GET', - params={'q': 'archive-all'}, - raise_on_status=True, - resp_type='response') + resp = client._http_request( + 'GET', + params={ + 'q': 'download', + 'path': path + }, + raise_on_status=True, + resp_type='response' + ) + encoding = args.get('encoding', 'utf-8') + match encoding: + case 'utf-8': + text = resp.content.decode(encoding) + + case 'base64': + text = base64.b64encode(resp.content).decode('utf-8') + + case _: + raise ValueError(f'Invalid encoding name: {encoding}') + + raw_response = { + 'path': os.path.normpath(path if path.startswith('/') else '/' + path), + 'name': os.path.basename(path), + 'data': text, + 'size': len(resp.content), + 'encoding': encoding + } + outputs = {camelize_string(k, '-'): v for k, v in raw_response.items()} + + if encoding != 'base64': + readable_output = ( + f'### {os.path.basename(path)}\n' + '```\n' + f'{text}\n' + '```\n' + ) + else: + readable_output = tblToMd( + os.path.basename(path), + { + 'Path': raw_response['path'], + 'Size': pretty_size(len(resp.content)) + } + ) + + return CommandResults( + outputs_prefix='WebFileRepository.Files(val.Path === obj.Path)', + outputs=outputs, + readable_output=readable_output, + raw_response=raw_response + ) - if not (filename := args.get('save_as')) and (content_disposition := resp.headers.get('Content-Disposition')): - cdp = email_parser.Parser().parsestr(f'Content-Disposition: {content_disposition}', headersonly=True) + +def command_archive_zip( + args: dict[str, str], + settings: Settings +) -> dict[str, Any]: + """ Archive all the files into a zip file + + :param args: The parameters which were given to the command. + :param settings: The instance settings.
+ """ + client = new_client(detect_service_ip_port(settings), settings) + resp = client._http_request( + 'GET', + params={'q': 'archive-all'}, + raise_on_status=True, + resp_type='response' + ) + if ( + not (filename := args.get('save_as')) + + and (content_disposition := resp.headers.get('Content-Disposition')) + ): + cdp = email_parser.Parser().parsestr( + f'Content-Disposition: {content_disposition}', + headersonly=True + ) filename = cdp.get_filename() return fileResult(filename or str(uuid.uuid4()), resp.content) @@ -4319,6 +4684,7 @@ def main() -> None: 'wfr-list-files': command_list_files, 'wfr-remove-files': command_remove_files, 'wfr-download-file': command_download_file, + 'wfr-download-as-text': command_download_as_text, 'wfr-archive-zip': command_archive_zip, } try: diff --git a/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository.yml b/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository.yml index 6d41133e3416..7855f1581ce5 100644 --- a/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository.yml +++ b/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository.yml @@ -217,6 +217,36 @@ script: name: save_as description: Download a file from the repository. name: wfr-download-file + - arguments: + - name: path + required: true + description: The file path. + - name: encoding + auto: PREDEFINED + predefined: + - utf-8 + - base64 + description: Encoding type to convert the file data when setting to the context. + Default is utf-8. + defaultValue: utf-8 + outputs: + - contextPath: WebFileRepository.Files.Name + description: The file name. + type: string + - contextPath: WebFileRepository.Files.Path + description: The file path. + type: string + - contextPath: WebFileRepository.Files.Size + description: The file size in bytes. + type: number + - contextPath: WebFileRepository.Files.Data + description: The file data encoded in the encoding. + type: string + - contextPath: WebFileRepository.Files.Encoding + description: The encoding name. + type: string + description: Retrieve the file data from the repository into the context. + name: wfr-download-as-text - arguments: - description: The name to give the archive-file to save. 
name: save_as diff --git a/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository_test.py b/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository_test.py index e3025632ef96..727097df82d9 100644 --- a/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository_test.py +++ b/Packs/WebFileRepository/Integrations/WebFileRepository/WebFileRepository_test.py @@ -1717,6 +1717,88 @@ def test_command_download_file(mocker, path, save_as, content_filename): assert res['File'] == filename +@pytest.mark.parametrize(argnames='path, ' + 'encoding, ' + 'content, ' + 'results_filename', + argvalues=[ + ('/test.dat', + None, + 'Hello!', + 'test_data/download_as_text_01.json' + ), + ('/test.dat', + 'utf-8', + 'Hello!', + 'test_data/download_as_text_01.json' + ), + ('/test.dat', + 'base64', + 'Hello!', + 'test_data/download_as_text_02.json' + ), + ('test.dat', + None, + 'Hello!', + 'test_data/download_as_text_01.json' + ), + ('test.dat', + 'utf-8', + 'Hello!', + 'test_data/download_as_text_01.json' + ), + ('test.dat', + 'base64', + 'Hello!', + 'test_data/download_as_text_02.json' + ), + ]) +def test_command_download_as_text(mocker, path, encoding, content, results_filename): + """ + Given: + Some patterns of parameters for command_download_as_text + + When: + Running script to send a request. + + Then: + Validate the right response returns. + """ + params = { + 'longRunningPort': '8000', + 'rwCredentials': {}, + 'authenticationMethod': None, + 'publicReadAccess': True, + 'mimeTypes': None, + 'mergeMimeTypes': True, + 'attachmentExtensions': None, + 'storageProtection': 'read/write', + 'maxStorageSize': None, + 'maxSandboxSize': None, + } + mocker.patch.object(demisto, 'params', return_value=params) + + client = MockBaseClient(mocker, headers={}, content=content.encode('utf-8')) + mocker.patch.object(WebFileRepository, 'new_client', return_value=client) + + importlib.reload(WebFileRepository) + + args = assign_params( + path=path, + encoding=encoding + ) + settings = WebFileRepository.Settings(params) + res = WebFileRepository.command_download_as_text(args, settings).to_context() + + keys = ('Type', 'ContentFormat', 'Contents', 'EntryContext') + res = {k: v for k, v in res.items() if k in keys} + + with open(results_filename) as f: + expected = {k: v for k, v in json.load(f).items() if k in keys} + + assert equals_object(res, expected) + + @pytest.mark.parametrize(argnames='save_as, ' 'content_filename', argvalues=[ diff --git a/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/download_as_text_01.json b/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/download_as_text_01.json new file mode 100644 index 000000000000..05299fe59175 --- /dev/null +++ b/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/download_as_text_01.json @@ -0,0 +1,19 @@ +{ + "Type": 1, + "Contents": { + "path": "/test.dat", + "name": "test.dat", + "data": "Hello!", + "size": 6, + "encoding": "utf-8" + }, + "EntryContext": { + "WebFileRepository.Files(val.Path === obj.Path)": { + "Path": "/test.dat", + "Name": "test.dat", + "Data": "Hello!", + "Size": 6, + "Encoding": "utf-8" + } + } +} diff --git a/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/download_as_text_02.json b/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/download_as_text_02.json new file mode 100644 index 000000000000..c7467788e745 --- /dev/null +++ b/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/download_as_text_02.json @@ -0,0 
+1,19 @@ +{ + "Type": 1, + "Contents": { + "path": "/test.dat", + "name": "test.dat", + "data": "SGVsbG8h", + "size": 6, + "encoding": "base64" + }, + "EntryContext": { + "WebFileRepository.Files(val.Path === obj.Path)": { + "Path": "/test.dat", + "Name": "test.dat", + "Data": "SGVsbG8h", + "Size": 6, + "Encoding": "base64" + } + } +} diff --git a/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/list_files_results_01.json b/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/list_files_results_01.json index 2b23cc60ca33..01df90f43483 100644 --- a/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/list_files_results_01.json +++ b/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/list_files_results_01.json @@ -40,7 +40,7 @@ } ], "EntryContext": { - "WebFileRepository.Files": [{ + "WebFileRepository.Files(val.Path === obj.Path)": [{ "Name": "a.dat", "Path": "/a.dat", "Size": 3, diff --git a/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/list_files_results_02.json b/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/list_files_results_02.json index 2b23cc60ca33..01df90f43483 100644 --- a/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/list_files_results_02.json +++ b/Packs/WebFileRepository/Integrations/WebFileRepository/test_data/list_files_results_02.json @@ -40,7 +40,7 @@ } ], "EntryContext": { - "WebFileRepository.Files": [{ + "WebFileRepository.Files(val.Path === obj.Path)": [{ "Name": "a.dat", "Path": "/a.dat", "Size": 3, diff --git a/Packs/WebFileRepository/ReleaseNotes/1_0_24.md b/Packs/WebFileRepository/ReleaseNotes/1_0_24.md new file mode 100644 index 000000000000..ad6df4ec9475 --- /dev/null +++ b/Packs/WebFileRepository/ReleaseNotes/1_0_24.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### Web File Repository + +- Added the ***wfr-download-as-text*** command. +- Updated the ***wfr-list-files*** command to merge file parameters in the context based on the file path.
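For reference, the `download_as_text_02.json` fixture above is what `!wfr-download-as-text path=/test.dat encoding=base64` produces for a 6-byte `test.dat` containing `Hello!`; a consumer can recover the original bytes from the context like so (values taken straight from the fixture):

```python
import base64

data = 'SGVsbG8h'  # WebFileRepository.Files.Data from the fixture above
print(base64.b64decode(data).decode('utf-8'))  # Hello!
```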
diff --git a/Packs/WebFileRepository/pack_metadata.json b/Packs/WebFileRepository/pack_metadata.json
index 27a7c8a7dba1..e51792553786 100644
--- a/Packs/WebFileRepository/pack_metadata.json
+++ b/Packs/WebFileRepository/pack_metadata.json
@@ -2,7 +2,7 @@
     "name": "Web File Repository",
     "description": "Simple web server with a file uploading console to store small files.\n",
     "support": "community",
-    "currentVersion": "1.0.23",
+    "currentVersion": "1.0.24",
    "author": "Masahiko Inoue",
     "url": "",
     "email": "",
diff --git a/Tests/Marketplace/Tests/search_and_install_packs_test.py b/Tests/Marketplace/Tests/search_and_install_packs_test.py
index fab26cd127e6..b83627f4da36 100755
--- a/Tests/Marketplace/Tests/search_and_install_packs_test.py
+++ b/Tests/Marketplace/Tests/search_and_install_packs_test.py
@@ -757,19 +757,23 @@ def test_get_packs_and_dependencies_to_install_no_deprecated(mocker: MockFixture
     Then:
         Ensure correct return value with no deprecated dependencies
     """
+    client = MockClient()
     mocker.patch.object(script, 'search_for_deprecated_dependencies',
                         return_value=True)
+    mocker.patch.object(script, "get_server_numeric_version", return_value="6.9")
+    mocker.patch.object(script, "create_packs_artifacts", return_value="")
 
     pack_id = "PackA"
-    dependencies = ["Dep1", "Dep2"]
+    dependencies = {"Dep1", "Dep2"}
     production_bucket = True
     dependencies_data = {}
+    mocker.patch.object(script, "filter_packs_by_min_server_version", return_value=dependencies)
 
     pack_ids = [pack_id]
     graph_dependencies = DiGraph([(d, pack_id) for d in dependencies])
 
     result = script.get_packs_and_dependencies_to_install(
-        pack_ids, graph_dependencies, production_bucket, dependencies_data)
+        pack_ids, graph_dependencies, production_bucket, dependencies_data, client)
 
     assert result == (True, {pack_id, *dependencies})
 
@@ -783,11 +787,13 @@ def test_get_packs_and_dependencies_to_install_no_dependencies(mocker: MockFixtu
     Then:
         Ensure that the pack itself added to result
     """
+    client = MockClient()
     mocker.patch.object(script, 'search_for_deprecated_dependencies',
                         return_value=True)
+    mocker.patch.object(script, "create_packs_artifacts", return_value="")
 
     pack_id = "PackA"
-    dependencies = []
+    dependencies = set()
     production_bucket = True
     dependencies_data = {}
 
@@ -796,7 +802,7 @@ def test_get_packs_and_dependencies_to_install_no_dependencies(mocker: MockFixtu
     graph_dependencies.add_node(pack_id)
 
     result = script.get_packs_and_dependencies_to_install(
-        pack_ids, graph_dependencies, production_bucket, dependencies_data)
+        pack_ids, graph_dependencies, production_bucket, dependencies_data, client)
 
     assert result == (True, {pack_id, *dependencies})
 
@@ -815,19 +821,23 @@ def test_get_packs_and_dependencies_to_install_deprecated(mocker: MockFixture):
         - Ensure empty dependencies set returned
         - Ensure no deprecated dependencies flag set to False
     """
-    mocker.patch.object(script, 'search_for_deprecated_dependencies',
+    client = MockClient()
+    mocker.patch.object(script, "search_for_deprecated_dependencies",
                         return_value=False)
+    mocker.patch.object(script, "get_server_numeric_version", return_value="6.9")
+    mocker.patch.object(script, "create_packs_artifacts", return_value="")
 
     pack_id = "PackA"
-    dependencies = ["Dep1", "Dep2"]
+    dependencies = {"Dep1", "Dep2"}
     production_bucket = True
     dependencies_data = {}
+    mocker.patch.object(script, "filter_packs_by_min_server_version", return_value=dependencies)
 
     pack_ids = [pack_id]
     graph_dependencies = DiGraph([(d, pack_id) for d in dependencies])
 
     result = script.get_packs_and_dependencies_to_install(
-        pack_ids, graph_dependencies, production_bucket, dependencies_data)
+        pack_ids, graph_dependencies, production_bucket, dependencies_data, client)
 
     assert result == (False, set())
 
@@ -997,3 +1007,62 @@ def test_search_and_install_packs_failure_install_packs(mocker: MockFixture):
     )
 
     assert success is False
+
+
+@pytest.mark.parametrize(
+    'pack_version, expected_results',
+    [('6.5.0', {'TestPack'}), ('6.8.0', set())])
+def test_get_packs_with_higher_min_version(mocker: MockFixture, pack_version, expected_results):
+    """
+    Given:
+        - Pack names to install.
+        - case 1: a server version (6.5.0) lower than the pack's min version.
+        - case 2: a server version (6.8.0) higher than the pack's min version.
+    When:
+        - Running the 'get_packs_with_higher_min_version' method.
+    Then:
+        - Assert only packs whose min version exceeds the server version are returned.
+        - case 1: the pack should be returned, i.e., filtered out of the installation.
+        - case 2: no packs should be returned.
+    """
+    mocker.patch.object(script, "get_json_file",
+                        return_value={"serverMinVersion": "6.6.0"})
+
+    packs_with_higher_min_version = script.get_packs_with_higher_min_version({'TestPack'}, pack_version, "")
+    assert packs_with_higher_min_version == expected_results
+
+
+def test_filter_packs_by_min_server_version_packs_filtered(mocker: MockFixture):
+    """
+    Given:
+        A set of pack IDs and a server version
+    When:
+        Some packs have a higher min version than the server version
+    Then:
+        It returns only the pack IDs whose min version does not exceed the server version
+    """
+    packs_id = {"Pack1", "Pack2", "Pack3"}
+    server_version = "6.10.0"
+    mocker.patch.object(script, 'get_packs_with_higher_min_version', return_value={"Pack2", "Pack3"})
+
+    filtered_packs = script.filter_packs_by_min_server_version(packs_id, server_version, "")
+
+    assert filtered_packs == {"Pack1"}
+
+
+def test_filter_packs_by_min_server_version_no_packs_filtered(mocker: MockFixture):
+    """
+    Given:
+        A set of pack IDs and a server version
+    When:
+        No packs have a higher min version than the server version
+    Then:
+        It returns the original set of pack IDs
+    """
+    packs_id = {"Pack1", "Pack2", "Pack3"}
+    server_version = "6.9.0"
+    mocker.patch.object(script, 'get_packs_with_higher_min_version', return_value=set())
+
+    filtered_packs = script.filter_packs_by_min_server_version(packs_id, server_version, "")
+
+    assert filtered_packs == packs_id
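For orientation, here is a minimal standalone sketch (not part of the patch) of the graph convention the tests above rely on: edges run from a dependency to the pack that needs it, so the networkx ancestors of a pack are exactly its dependency set.

import networkx as nx
from networkx import DiGraph

pack_id = "PackA"
dependencies = {"Dep1", "Dep2"}

# Edge (d, pack_id) means "d must be installed for pack_id to work".
graph_dependencies = DiGraph([(d, pack_id) for d in dependencies])

# nx.ancestors() returns every node with a path TO pack_id, i.e. its dependencies.
assert nx.ancestors(graph_dependencies, pack_id) == dependencies
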
diff --git a/Tests/Marketplace/search_and_install_packs.py b/Tests/Marketplace/search_and_install_packs.py
index e8f8ad508d2d..3f0119bd8190 100644
--- a/Tests/Marketplace/search_and_install_packs.py
+++ b/Tests/Marketplace/search_and_install_packs.py
@@ -8,6 +8,7 @@ import re
 from functools import lru_cache
 from pathlib import Path
+from tempfile import mkdtemp
 from typing import Any
 import networkx as nx
 from networkx import DiGraph
@@ -26,17 +27,20 @@
                                                  GCPConfig, Metadata)
 from Tests.Marketplace.marketplace_services import (Pack, init_storage_client,
                                                     load_json)
-from Tests.Marketplace.upload_packs import download_and_extract_index
+from Tests.Marketplace.upload_packs import download_and_extract_index, extract_packs_artifacts
 from Tests.scripts.utils import logging_wrapper as logging
 from demisto_sdk.commands.test_content.ParallelLoggingManager import ARTIFACTS_PATH
 
+from Tests.test_content import get_server_numeric_version
+
 PACK_PATH_VERSION_REGEX = re.compile(fr'^{GCPConfig.PRODUCTION_STORAGE_BASE_PATH}/[A-Za-z0-9-_.]+/(\d+\.\d+\.\d+)/[A-Za-z0-9-_.]'  # noqa: E501
                                      r'+\.zip$')
 WLM_TASK_FAILED_ERROR_CODE = 101704
 GITLAB_SESSION = Session()
 CONTENT_PROJECT_ID = os.getenv('CI_PROJECT_ID', '1061')
+ARTIFACTS_FOLDER_SERVER_TYPE = os.getenv('ARTIFACTS_FOLDER_SERVER_TYPE')
 PACKS_DIR = "Packs"
 PACK_METADATA_FILE = Pack.PACK_METADATA
 GITLAB_PACK_METADATA_URL = f'{{gitlab_url}}/api/v4/projects/{CONTENT_PROJECT_ID}/repository/files/{PACKS_DIR}%2F{{pack_id}}%2F{PACK_METADATA_FILE}'  # noqa: E501
@@ -564,6 +568,40 @@ def search_and_install_packs_and_their_dependencies_private(test_pack_path: str,
     return install_packs_private(client, host, pack_ids, test_pack_path)
 
 
+def get_json_file(path):
+    with open(path) as json_file:
+        return json.load(json_file)
+
+
+def get_packs_with_higher_min_version(packs_names: set[str],
+                                      server_numeric_version: str,
+                                      extract_content_packs_path: str) -> set[str]:
+    """
+    Return the packs whose min version is higher than the server version.
+
+    Args:
+        packs_names (set[str]): A set of packs to install.
+        server_numeric_version (str): The server version.
+        extract_content_packs_path (str): Path to a temporary folder with extracted content packs metadata.
+
+    Returns:
+        (set[str]): The pack names that should not be installed because
+            their min version is greater than the server version.
+    """
+    packs_with_higher_version = set()
+    for pack_name in packs_names:
+        pack_metadata = get_json_file(f"{extract_content_packs_path}/{pack_name}/metadata.json")
+        server_min_version = pack_metadata.get(Metadata.SERVER_MIN_VERSION,
+                                               pack_metadata.get('server_min_version', Metadata.SERVER_DEFAULT_MIN_VERSION))
+
+        if 'Master' not in server_numeric_version and Version(server_numeric_version) < Version(server_min_version):
+            packs_with_higher_version.add(pack_name)
+            logging.info(f"Skipping installation of pack '{pack_name}' since its min version {server_min_version} "
+                         f"is higher than the server version {server_numeric_version}")
+
+    return packs_with_higher_version
+
+
 def create_graph(
     all_packs_dependencies: dict,
 ) -> DiGraph:
@@ -694,6 +732,10 @@ def get_one_page_of_packs_dependencies(
     body = {
         "page": page,
         "size": PAGE_SIZE_DEFAULT,
+        "sort": [
+            {"field": "searchRank", "asc": False},
+            {"field": "updated", "asc": False},
+        ]
     }
 
     def success_handler(response):
@@ -756,11 +798,44 @@
     return True
 
 
+def filter_packs_by_min_server_version(packs_id: set[str], server_version: str, extract_content_packs_path: str):
+    """Filters a set of pack IDs to only those compatible with the given server version.
+
+    Args:
+        packs_id (set[str]): Set of pack IDs to filter
+        server_version (str): Server version to check pack compatibility against
+        extract_content_packs_path (str): Path to a temporary folder with extracted content packs metadata
+
+    Returns:
+        set[str]: Set of pack IDs that are compatible with the provided server version
+    """
+    packs_with_higher_server_version = get_packs_with_higher_min_version(
+        packs_names=packs_id,
+        server_numeric_version=server_version,
+        extract_content_packs_path=extract_content_packs_path
+    )
+    return packs_id - packs_with_higher_server_version
+
+
+def create_packs_artifacts():
+    """Creates artifacts for content packs.
+    Extracts the content packs zip file into a temporary directory.
+
+    Returns:
+        str: Path to the extracted content packs directory.
+    """
+    extract_content_packs_path = mkdtemp()
+    packs_artifacts_path = f'{ARTIFACTS_FOLDER_SERVER_TYPE}/content_packs.zip'
+    extract_packs_artifacts(packs_artifacts_path, extract_content_packs_path)
+    return extract_content_packs_path
+
+
 def get_packs_and_dependencies_to_install(
     pack_ids: list,
     graph_dependencies: DiGraph,
     production_bucket: bool,
     all_packs_dependencies_data: dict,
+    client: DemistoClient,
 ) -> tuple[bool, set]:
     """
     Fetches all dependencies for the given list of pack IDs and returns the packs and dependencies that should be installed.
@@ -777,6 +852,8 @@
     """
     no_deprecated_dependencies = True
     all_packs_and_dependencies_to_install: set[str] = set()
+    server_numeric_version = get_server_numeric_version(client)
+    extract_content_packs_path = create_packs_artifacts()
 
     for pack_id in pack_ids:
         dependencies_for_pack_id = nx.ancestors(graph_dependencies, pack_id)
@@ -785,6 +862,9 @@
             logging.debug(
                 f"Found dependencies for '{pack_id}': {dependencies_for_pack_id}"
             )
+            dependencies_for_pack_id = filter_packs_by_min_server_version(
+                dependencies_for_pack_id, server_numeric_version, extract_content_packs_path
+            )
             no_deprecated_dependency = search_for_deprecated_dependencies(
                 pack_id,
                 dependencies_for_pack_id,
@@ -974,6 +1054,7 @@
             graph_dependencies,
             production_bucket,
             all_packs_dependencies_data,
+            client,
         )
         success &= no_deprecated_dependencies
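The heart of the new filtering above is a single version comparison. A minimal sketch of that gate, assuming `Version` is `packaging.version.Version` as imported elsewhere in the build scripts (the helper name here is hypothetical):

from packaging.version import Version

def should_skip(server_version: str, pack_min_version: str) -> bool:
    # Master builds are never skipped; numbered builds must meet the pack's minimum.
    return 'Master' not in server_version and Version(server_version) < Version(pack_min_version)

assert should_skip("6.5.0", "6.6.0") is True     # server too old: pack is filtered out
assert should_skip("6.8.0", "6.6.0") is False    # server new enough: pack is installed
assert should_skip("Master", "6.6.0") is False   # master builds install everything
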
diff --git a/Tests/scripts/convert_test_playbook_result_to_jira_issues.py b/Tests/scripts/convert_test_playbook_result_to_jira_issues.py
index b6894f79a54e..de75b932eb6a 100644
--- a/Tests/scripts/convert_test_playbook_result_to_jira_issues.py
+++ b/Tests/scripts/convert_test_playbook_result_to_jira_issues.py
@@ -19,7 +19,7 @@
     JIRA_PROJECT_ID, JIRA_ISSUE_TYPE, JIRA_COMPONENT, JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME, JIRA_LABELS, \
     find_existing_jira_ticket, JIRA_ADDITIONAL_FIELDS, generate_ticket_summary, generate_build_markdown_link, \
     jira_server_information, jira_search_all_by_query, generate_query_by_component_and_issue_type, jira_file_link, \
-    jira_sanitize_file_name, jira_color_text
+    jira_sanitize_file_name, jira_color_text, transition_jira_ticket_to_unresolved
 from Tests.scripts.test_playbooks_report import calculate_test_playbooks_results, \
     TEST_PLAYBOOKS_BASE_HEADERS, get_jira_tickets_for_playbooks, TEST_PLAYBOOKS_JIRA_BASE_HEADERS, \
     write_test_playbook_to_jira_mapping, TEST_PLAYBOOKS_TO_JIRA_TICKETS_CONVERTED
@@ -77,9 +77,12 @@ def create_jira_issue(jira_server: JIRA,
                       ) -> Issue:
     summary = generate_ticket_summary(playbook_id)
     description = generate_description_for_test_playbook(playbook_id, build_number, junit_file_name, table_data, failed)
-    jira_issue, link_to_issue, use_existing_issue = find_existing_jira_ticket(jira_server, now, max_days_to_reopen, jira_issue)
+    jira_issue, link_to_issue, use_existing_issue, unresolved_transition_id = find_existing_jira_ticket(jira_server, now,
+                                                                                                        max_days_to_reopen,
+                                                                                                        jira_issue)
 
     if jira_issue is not None:
+        transition_jira_ticket_to_unresolved(jira_server, jira_issue, unresolved_transition_id)
         jira_server.add_comment(issue=jira_issue, body=description)
     else:
         jira_issue = jira_server.create_issue(project=JIRA_PROJECT_ID,
diff --git a/Tests/scripts/find_pack_dependencies_changes.py b/Tests/scripts/find_pack_dependencies_changes.py
index dda528a3f4ba..4f5b124e7d36 100644
--- a/Tests/scripts/find_pack_dependencies_changes.py
+++ b/Tests/scripts/find_pack_dependencies_changes.py
@@ -96,21 +96,29 @@ def get_diff(args: Namespace) -> dict:  # pragma: no cover
     absolute_artifacts_folder = Path(args.artifacts_folder)
     relative_artifacts_folder = absolute_artifacts_folder.relative_to(ARTIFACTS_DIR_LOCATION)
     gitlab_client = GitlabClient(args.gitlab_token)
-    previous = gitlab_client.get_packs_dependencies_json(
-        args.master_sha,
-        args.job_name,
-        relative_artifacts_folder / PACKS_DEPENDENCIES_FILENAME,
+    master_packs_dependencies_json = json.loads(
+        gitlab_client.get_artifact_file(
+            args.master_sha,
+            args.job_name,
+            relative_artifacts_folder / PACKS_DEPENDENCIES_FILENAME,
+            ref="master",
+        )
     )
-    current = json.loads((absolute_artifacts_folder / PACKS_DEPENDENCIES_FILENAME).read_text())
-    return compare(previous, current)
+    current_packs_dependencies_json = json.loads(
+        (absolute_artifacts_folder / PACKS_DEPENDENCIES_FILENAME).read_text()
+    )
+    return compare(master_packs_dependencies_json, current_packs_dependencies_json)
 
 
 def main():  # pragma: no cover
-    args = parse_args()
-    diff = get_diff(args)
-    diff_file = Path(args.artifacts_folder) / DIFF_FILENAME
-    logger.info(f"Dumping the diff to an artifact file: {diff_file}")
-    diff_file.write_text(json.dumps(diff, indent=4))
+    try:
+        args = parse_args()
+        diff = get_diff(args)
+        diff_file = Path(args.artifacts_folder) / DIFF_FILENAME
+        logger.info(f"Dumping the diff to an artifact file: {diff_file}")
+        diff_file.write_text(json.dumps(diff, indent=4))
+    except Exception as e:
+        logger.warning(f"Skipping pack dependencies calculation:\n{e}")
 
 
 if __name__ == '__main__':
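For orientation, a toy illustration of what the dependencies diff boils down to. This is not the repository's compare(), and the data shapes are invented: both artifacts are JSON mappings keyed by pack, and the diff captures per-pack additions, removals, and changes.

previous = {"PackA": {"dependencies": ["Base"]}}
current = {"PackA": {"dependencies": ["Base", "CommonScripts"]},
           "PackB": {"dependencies": []}}

# Packs present only in the current build, only in the previous one, or modified.
added = {p: d for p, d in current.items() if p not in previous}
removed = {p: d for p, d in previous.items() if p not in current}
changed = {p: d for p, d in current.items() if p in previous and previous[p] != d}

assert set(added) == {"PackB"} and not removed and set(changed) == {"PackA"}
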
diff --git a/Tests/scripts/get_previous_master_sha.sh b/Tests/scripts/get_previous_master_sha.sh
old mode 100644
new mode 100755
diff --git a/Tests/scripts/gitlab_client.py b/Tests/scripts/gitlab_client.py
index e16ac34fa78a..62f9cdbf69e4 100644
--- a/Tests/scripts/gitlab_client.py
+++ b/Tests/scripts/gitlab_client.py
@@ -1,5 +1,3 @@
-import enum
-import json
 import os
 from tempfile import mkdtemp
 import zipfile
@@ -14,13 +12,6 @@
 PROJECT_ID = os.getenv("CI_PROJECT_ID", "1061")
 
 
-class GetArtifactErrors(str, enum.Enum):
-    NO_PIPELINES = "No pipelines for this SHA"
-    NO_JOB = "No jobs with the specified name"
-    NO_ARTIFACTS = "No artifacts in the specified job"
-    NO_FILE_IN_ARTIFACTS = "The specified file does not exist in the artifacts"
-
-
 class GitlabClient:
     def __init__(self, gitlab_token: str) -> None:
         self.base_url = f"{API_BASE_URL}/projects/{PROJECT_ID}"
@@ -40,8 +31,18 @@ def _get(
             return response.json()
         return response
 
-    def get_pipelines_by_sha(self, commit_sha: str) -> list:
-        return self._get(f"pipelines?sha={commit_sha}", to_json=True)
+    def get_pipelines(
+        self,
+        commit_sha: str | None = None,
+        ref: str | None = None,
+        sort: str = "asc",
+    ) -> list:
+        params = {
+            "sha": commit_sha,
+            "ref": ref,
+            "sort": sort,
+        }
+        return self._get("pipelines", params=params, to_json=True)
 
     def get_job_id_by_name(self, pipeline_id: str, job_name: str) -> str | None:
         response: list = self._get(f"pipelines/{pipeline_id}/jobs", to_json=True)
@@ -71,6 +72,7 @@ def get_artifact_file(
         commit_sha: str,
         job_name: str,
         artifact_filepath: Path,
+        ref: str | None = None,
     ) -> str:
         """Gets an artifact file data as text.
 
@@ -78,38 +80,35 @@
            commit_sha (str): A commit SHA
            job_name (str): A job name
            artifact_filepath (Path): The artifact file path
            ref (str | None): The branch name, if any.

        Raises:
-            Exception: An exception message specifying the reasons for not returning the file data.
+            Exception: An exception message specifying the reasons for not returning the file data,
+                for each pipeline triggered for the given commit SHA.

        Returns:
            str: The artifact text data.
        """
-        pipeline_ids = [p["id"] for p in self.get_pipelines_by_sha(commit_sha)]
-        pid_to_err = {}
-        for pipeline_id in pipeline_ids:
-            if job_id := self.get_job_id_by_name(pipeline_id, job_name):
-                try:
-                    bundle_path = self.download_and_extract_artifacts_bundle(job_id)
-                    return (bundle_path / artifact_filepath).read_text()
-                except requests.HTTPError:
-                    pid_to_err[pipeline_id] = GetArtifactErrors.NO_ARTIFACTS.value
-                except FileNotFoundError:
-                    pid_to_err[pipeline_id] = GetArtifactErrors.NO_FILE_IN_ARTIFACTS.value
-            else:
-                pid_to_err[pipeline_id] = GetArtifactErrors.NO_JOB.value
-
-        raise Exception(
-            f"Could not extract {artifact_filepath.name} from any pipeline of SHA {commit_sha}. "
-            f"Err: {GetArtifactErrors.NO_PIPELINES.value if not pipeline_ids else pid_to_err}"
-        )
-
-    def get_packs_dependencies_json(
-        self,
-        commit_sha: str,
-        job_name: str,
-        packs_dependencies_filepath: Path,
-    ) -> dict:
-        return json.loads(
-            self.get_artifact_file(commit_sha, job_name, packs_dependencies_filepath)
-        )
+        try:
+            pipelines = self.get_pipelines(commit_sha=commit_sha, ref=ref)
+            if not pipelines:
+                raise Exception("No pipelines found for this SHA")
+            errors = []
+            for pipeline in pipelines:
+                pid = pipeline["id"]
+                if job_id := self.get_job_id_by_name(pid, job_name):
+                    try:
+                        bundle_path = self.download_and_extract_artifacts_bundle(job_id)
+                        return (bundle_path / artifact_filepath).read_text()
+                    except requests.HTTPError:
+                        errors.append(f"Pipeline #{pid}: No artifacts in job {job_name}")
+                    except FileNotFoundError:
+                        errors.append(f"Pipeline #{pid}: The file {artifact_filepath} does not exist in the artifacts")
+                else:
+                    errors.append(f"Pipeline #{pid}: No job with the name {job_name}")
+            raise Exception("\n".join(errors))
+
+        except Exception as e:
+            raise Exception(
+                f"Could not extract {artifact_filepath.name} from any pipeline with SHA {commit_sha}:\n{e}"
+            )
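One detail worth noting about get_pipelines() above: it can pass commit_sha and ref straight through even when they are None, because requests drops query parameters whose value is None. A small sketch (the URL is a stand-in):

import requests

req = requests.Request(
    "GET",
    "https://gitlab.example.com/api/v4/projects/1061/pipelines",  # stand-in URL
    params={"sha": None, "ref": "master", "sort": "asc"},
).prepare()

# The None-valued "sha" parameter is omitted from the query string.
assert req.url.endswith("/pipelines?ref=master&sort=asc")
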
diff --git a/Tests/scripts/infrastructure_tests/gitlab_client_test.py b/Tests/scripts/infrastructure_tests/gitlab_client_test.py
index 33fc931f7aca..43e729505971 100644
--- a/Tests/scripts/infrastructure_tests/gitlab_client_test.py
+++ b/Tests/scripts/infrastructure_tests/gitlab_client_test.py
@@ -6,7 +6,7 @@
 
 import pytest
 
-from Tests.scripts.gitlab_client import GitlabClient, GetArtifactErrors
+from Tests.scripts.gitlab_client import GitlabClient
 
 SHA = "mock_sha"
@@ -34,7 +34,7 @@ def mock_artifacts_api_response(
     return mock_bytes.getvalue()
 
 
-def test_get_packs_dependencies(
+def test_get_artifact_file(
     client: GitlabClient,
     requests_mock,
 ) -> None:
@@ -42,15 +42,15 @@
     Given:
         - A Gitlab Client
         - A Commit SHA
-        - The job name in which a packs_dependencies.json should be stored as an artifact
+        - The job name in which a packs_dependencies.json file should be stored as an artifact
     When:
-        - Calling get_packs_dependencies_json()
+        - Calling get_artifact_file()
     Then:
        - Ensure the response is the expected data.
     """
     packs_dependencies_json: dict = {}
     requests_mock.get(
-        f"{client.base_url}/pipelines?sha={SHA}",
+        f"{client.base_url}/pipelines",
         json=[{"id": "mock_pipeline_id"}],
     )
     requests_mock.get(
@@ -61,11 +61,11 @@
         f"{client.base_url}/jobs/mock_job_id/artifacts",
         content=mock_artifacts_api_response(packs_dependencies_json),
     )
-    assert client.get_packs_dependencies_json(
+    assert json.loads(client.get_artifact_file(
         SHA,
         JOB_NAME,
         PACKS_DEPENDENCIES_FILEPATH,
-    ) == packs_dependencies_json
+    )) == packs_dependencies_json
 
 
 @pytest.mark.parametrize(
@@ -75,54 +75,54 @@ def test_get_packs_dependencies(
         pytest.param(
             [],
             None,
             None,
-            GetArtifactErrors.NO_PIPELINES,
+            "No pipelines",
             id="No Pipelines",
         ),
         pytest.param(
             [{"id": "mock_pipeline_id"}],
             [{"id": "mock_job_id", "name": "some_job"}],
             None,
-            GetArtifactErrors.NO_JOB,
+            "No job",
             id="No Job",
         ),
         pytest.param(
             [{"id": "mock_pipeline_id"}],
             [{"id": "mock_job_id", "name": JOB_NAME}],
             {"status_code": 404},
-            GetArtifactErrors.NO_ARTIFACTS,
+            "No artifacts",
             id="No artifacts",
         ),
        pytest.param(
             [{"id": "mock_pipeline_id"}],
             [{"id": "mock_job_id", "name": JOB_NAME}],
             {"content": mock_artifacts_api_response(data=None)},
-            GetArtifactErrors.NO_FILE_IN_ARTIFACTS,
+            "does not exist in the artifacts",
             id="No pack_dependencies.json file in artifacts",
         ),
     ]
 )
-def test_get_packs_dependencies_bad(
+def test_get_artifact_file_bad(
     client: GitlabClient,
     requests_mock: Any,
     pipelines_mock_response: list | None,
     jobs_mock_response: list | None,
     artifacts_mock_repsonse: dict | None,
-    expected_err: GetArtifactErrors,
+    expected_err: str,
 ) -> None:
     """
     Given:
         - A Gitlab Client
         - A Commit SHA
-        - The job name in which a packs_dependencies.json should be stored as an artifact
+        - The job name in which a packs_dependencies.json file should be stored as an artifact
         - A marketplace version
         - Test cases for different Gitlab API responses.
     When:
-        - Calling get_packs_dependencies_json()
+        - Calling get_artifact_file()
     Then:
        - Ensure an exception is raised for all test cases.
""" requests_mock.get( - f"{client.base_url}/pipelines?sha={SHA}", + f"{client.base_url}/pipelines", json=pipelines_mock_response, ) requests_mock.get( @@ -135,9 +135,9 @@ def test_get_packs_dependencies_bad( **artifacts_mock_repsonse, ) with pytest.raises(Exception) as e: - client.get_packs_dependencies_json( + client.get_artifact_file( SHA, JOB_NAME, PACKS_DEPENDENCIES_FILEPATH, ) - assert expected_err.value in str(e) + assert expected_err in str(e) diff --git a/Tests/scripts/jira_issues.py b/Tests/scripts/jira_issues.py index 90f5550c51b8..64b4ad8bb352 100644 --- a/Tests/scripts/jira_issues.py +++ b/Tests/scripts/jira_issues.py @@ -62,13 +62,26 @@ def jira_color_text(text: str, color: str) -> str: return f"{{color:{color}}}{text}{{color}}" +def get_transition(jira_server, jira_issue) -> str | None: + transitions = jira_server.transitions(jira_issue) + unresolved_transition = next(filter(lambda transition: transition['name'] == JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME, + transitions), None) + return unresolved_transition['id'] if unresolved_transition else None + + +def transition_jira_ticket_to_unresolved(jira_server: JIRA, jira_issue: Issue | None, unresolved_transition_id: str | None): + if unresolved_transition_id: + jira_server.transition_issue(jira_issue, unresolved_transition_id) + + def find_existing_jira_ticket(jira_server: JIRA, now: datetime, max_days_to_reopen: int, jira_issue: Issue | None, - ) -> tuple[Issue | None, Issue | None, bool]: + ) -> tuple[Issue | None, Issue | None, bool, str | None]: link_to_issue = None jira_issue_to_use = None + unresolved_transition_id = None if use_existing_issue := (jira_issue is not None): searched_issue: Issue = jira_issue if searched_issue.get_field("resolution"): @@ -76,18 +89,7 @@ def find_existing_jira_ticket(jira_server: JIRA, if use_existing_issue := (resolution_date and (now - resolution_date) <= timedelta(days=max_days_to_reopen)): # type: ignore[assignment] - - # Get the available transitions for the issue - transitions = jira_server.transitions(searched_issue) - - # Find the transition with the specified ID - unresolved_transition = None - for transition in transitions: - if transition['name'] == JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME: - unresolved_transition = transition - break - if unresolved_transition: - jira_server.transition_issue(searched_issue, unresolved_transition['id']) + if unresolved_transition_id := get_transition(jira_server, jira_issue): jira_issue_to_use = searched_issue else: logging.error(f"Failed to find the '{JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME}' " @@ -95,12 +97,11 @@ def find_existing_jira_ticket(jira_server: JIRA, jira_issue_to_use = None use_existing_issue = False link_to_issue = searched_issue - else: link_to_issue = searched_issue else: jira_issue_to_use = searched_issue - return jira_issue_to_use, link_to_issue, use_existing_issue + return jira_issue_to_use, link_to_issue, use_existing_issue, unresolved_transition_id def generate_build_markdown_link(ci_pipeline_id: str) -> str: diff --git a/Tests/scripts/test_modeling_rule_report.py b/Tests/scripts/test_modeling_rule_report.py index 4a0c00e04c93..c5f65b0f3811 100644 --- a/Tests/scripts/test_modeling_rule_report.py +++ b/Tests/scripts/test_modeling_rule_report.py @@ -14,7 +14,7 @@ from Tests.scripts.jira_issues import generate_ticket_summary, generate_query_with_summary, \ find_existing_jira_ticket, JIRA_PROJECT_ID, JIRA_ISSUE_TYPE, JIRA_COMPONENT, JIRA_LABELS, JIRA_ADDITIONAL_FIELDS, \ generate_build_markdown_link, convert_jira_time_to_datetime, 
diff --git a/Tests/scripts/test_modeling_rule_report.py b/Tests/scripts/test_modeling_rule_report.py
index 4a0c00e04c93..c5f65b0f3811 100644
--- a/Tests/scripts/test_modeling_rule_report.py
+++ b/Tests/scripts/test_modeling_rule_report.py
@@ -14,7 +14,7 @@
 from Tests.scripts.jira_issues import generate_ticket_summary, generate_query_with_summary, \
     find_existing_jira_ticket, JIRA_PROJECT_ID, JIRA_ISSUE_TYPE, JIRA_COMPONENT, JIRA_LABELS, JIRA_ADDITIONAL_FIELDS, \
     generate_build_markdown_link, convert_jira_time_to_datetime, jira_ticket_to_json_data, jira_file_link, \
-    jira_sanitize_file_name, jira_color_text
+    jira_sanitize_file_name, jira_color_text, transition_jira_ticket_to_unresolved
 from Tests.scripts.utils import logging_wrapper as logging
 
 TEST_MODELING_RULES_BASE_HEADERS = ["Test Modeling Rule"]
@@ -42,14 +42,16 @@ def create_jira_issue_for_test_modeling_rule(jira_server: JIRA,
     summary = generate_ticket_summary(get_summary_for_test_modeling_rule(properties))  # type: ignore[arg-type]
     jql_query = generate_query_with_summary(summary)
     search_issues: ResultList[Issue] = jira_server.search_issues(jql_query, maxResults=1)  # type: ignore[assignment]
-    jira_issue, link_to_issue, use_existing_issue = find_existing_jira_ticket(jira_server, now, max_days_to_reopen,
-                                                                              search_issues[0] if search_issues else None)
+    jira_issue, link_to_issue, use_existing_issue, \
+        unresolved_transition_id = find_existing_jira_ticket(jira_server, now, max_days_to_reopen,
+                                                             search_issues[0] if search_issues else None)
 
     if jira_issue is not None:
         if test_suite.failures == 0 and test_suite.errors == 0 and (resolution := jira_issue.get_field("resolution")) is not None:
             logging.info(f"Skipping updating Jira issue {jira_issue.key} as it has no failures or errors "
                          f"and the Jira ticket is resolved with resolution:{resolution}")
             return None
+        transition_jira_ticket_to_unresolved(jira_server, jira_issue, unresolved_transition_id)
         jira_server.add_comment(issue=jira_issue, body=description)
     else:
         if test_suite.failures == 0 and test_suite.errors == 0:
diff --git a/Tests/scripts/validate_index.py b/Tests/scripts/validate_index.py
index 2a3383965590..cfae842fb516 100644
--- a/Tests/scripts/validate_index.py
+++ b/Tests/scripts/validate_index.py
@@ -159,7 +159,6 @@ def main():
 
     if not all([index_is_valid, commit_hash_is_valid]):
         logging.critical("Index content is invalid. Aborting.")
-        exit_code = 1
 
     # Deleting GCS PATH before exit
     if exit_code == 1 and os.path.exists(options.service_account):
diff --git a/Tests/scripts/validate_premium_packs.py b/Tests/scripts/validate_premium_packs.py
index ee6fc13b9f50..038fb8e13fca 100644
--- a/Tests/scripts/validate_premium_packs.py
+++ b/Tests/scripts/validate_premium_packs.py
@@ -244,11 +244,8 @@ def main():
         log_message_if_statement(statement=paid_packs_are_identical,
                                  error_message=f"Test failed on host: {server.internal_ip}.",
                                  success_message=f"All premium packs in host: {server.internal_ip} are valid")
-        if not paid_packs_are_identical:
-            exit_code = 1
     else:
         logging.critical(f"Missing all premium packs in host: {server.internal_ip}")
-        exit_code = 1
 
     # Deleting GCS PATH before exit
     if os.path.exists(options.service_account):
diff --git a/Tests/scripts/validate_premium_packs.sh b/Tests/scripts/validate_premium_packs.sh
index 58ee231c0847..fd12041e20c9 100644
--- a/Tests/scripts/validate_premium_packs.sh
+++ b/Tests/scripts/validate_premium_packs.sh
@@ -1,5 +1,7 @@
 #!/usr/bin/env bash
 
+# THIS VALIDATION WILL BE REMOVED IN CIAC-9611
+
 # exit on errors
 set -e
diff --git a/Tests/scripts/wait_until_server_ready.py b/Tests/scripts/wait_until_server_ready.py
index 6ecbca6508fd..98f661c61da1 100644
--- a/Tests/scripts/wait_until_server_ready.py
+++ b/Tests/scripts/wait_until_server_ready.py
@@ -40,8 +40,8 @@ def docker_login(ip: str) -> None:
     Args:
         ip: The ip of the server that should be logged in
     """
-    docker_username = os.environ.get('DOCKER_READ_ONLY_USER')
-    docker_password = os.environ.get('DOCKER_READ_ONLY_PASSWORD') or ''
+    docker_username = os.environ.get('DOCKERHUB_XSOAR_READONLY_USERNAME')
+    docker_password = os.environ.get('DOCKERHUB_XSOAR_READONLY_PASSWORD') or ''
     container_engine_type = 'docker'
     try:
         check_output(
diff --git a/Tests/test_content.py b/Tests/test_content.py
index 7252233fe86e..ac2a12356589 100644
--- a/Tests/test_content.py
+++ b/Tests/test_content.py
@@ -9,6 +9,7 @@
 from typing import Any
 from collections.abc import Generator
 import demisto_client
+from demisto_client.demisto_api.api.default_api import DefaultApi as DemistoClient
 import pytz
 import requests
@@ -311,11 +312,11 @@ def load_env_results_json():
         return json.load(json_file)
 
 
-def get_server_numeric_version(client: demisto_client, is_local_run=False) -> str:
+def get_server_numeric_version(client: DemistoClient, is_local_run=False) -> str:
     """
     Gets the current server version
     Arguments:
-        client: (demisto_client): the demisto client
+        client: (DemistoClient): the demisto client
         is_local_run: (bool) when running locally, assume latest version.
 
     Returns:
diff --git a/poetry.lock b/poetry.lock
index 91797038d3a2..1800bc6932ae 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1354,13 +1354,13 @@ urllib3 = ">=1.26.7"
 
 [[package]]
 name = "demisto-sdk"
-version = "1.26.0"
+version = "1.26.1"
 description = "\"A Python library for the Demisto SDK\""
 optional = false
 python-versions = ">=3.8,<3.11"
 files = [
-    {file = "demisto_sdk-1.26.0-py3-none-any.whl", hash = "sha256:a65d453cc4339cf2e8c41eb6f73d37f317878ce82d83e1f19ca5a30c8d74360c"},
-    {file = "demisto_sdk-1.26.0.tar.gz", hash = "sha256:bd8c7a609822550ce882efeffff5f62d44923fc157ec99a6c93232ac84d4bc4d"},
+    {file = "demisto_sdk-1.26.1-py3-none-any.whl", hash = "sha256:d2a638d70ba1b5484823c61a22894e5fd68b1d474feb9fd7b0e962d7131674f0"},
+    {file = "demisto_sdk-1.26.1.tar.gz", hash = "sha256:ee697ce36eed1feab8f711c7d50e4e6d2751f39f5d8296bfe6f6ea9ef782f9ee"},
 ]
 
 [package.dependencies]
@@ -7127,4 +7127,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8,<3.11"
-content-hash = "1ff0d1d56292b46cb020db60116ef90ee5cfde73582540e7bf7c1c26dc5a1fb3"
+content-hash = "68794f48ec449516fd10dc003af8c85c2badd45478c20d28c375c482235c8de8"
diff --git a/pyproject.toml b/pyproject.toml
index 788a2566f0dc..1fe74114d6a8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -10,7 +10,7 @@ python = "^3.8,<3.11"
 defusedxml = "^0.7.1"
 
 [tool.poetry.group.dev.dependencies]
-demisto-sdk = "1.26.0"
+demisto-sdk = "1.26.1"
 requests = "^2.22.0"
 pre-commit = "^3.5.0"
 google-cloud-compute = "^1.8.0"